From 00c668e2fe7602d0c3ad7502c54cdca04af03639 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:28:33 +0800 Subject: [PATCH 01/27] feat: add sdk package and update version propagation in sync-versions tests --- .githooks/sync-versions.test.ts | 45 +++++++++++- Cargo.lock | 9 +++ Cargo.toml | 3 +- pnpm-lock.yaml | 117 ++++++++++++++++++++++---------- pnpm-workspace.yaml | 1 + turbo.json | 2 +- 6 files changed, 137 insertions(+), 40 deletions(-) diff --git a/.githooks/sync-versions.test.ts b/.githooks/sync-versions.test.ts index 2e8c6b69..451235a0 100644 --- a/.githooks/sync-versions.test.ts +++ b/.githooks/sync-versions.test.ts @@ -35,6 +35,11 @@ function createFixtureRepo(): string { name: '@truenine/memory-sync-cli', version: initialVersion }) + writeJson(join(rootDir, 'sdk', 'package.json'), { + name: '@truenine/memory-sync-sdk', + version: initialVersion, + private: true + }) writeJson(join(rootDir, 'cli', 'npm', 'darwin-arm64', 'package.json'), { name: '@truenine/memory-sync-cli-darwin-arm64', version: initialVersion @@ -101,6 +106,43 @@ describe('sync-versions hook', () => { expect(result.versionSource).toBe('cli/npm/darwin-arm64/package.json') expect(JSON.parse(readFileSync(join(rootDir, 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) expect(JSON.parse(readFileSync(join(rootDir, 'cli', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(JSON.parse(readFileSync(join(rootDir, 'sdk', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(JSON.parse(readFileSync(join(rootDir, 'libraries', 'logger', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(readFileSync(join(rootDir, 'Cargo.toml'), 'utf-8')).toContain(`version = "${nextVersion}"`) + expect(readFileSync(join(rootDir, 'cli-crate', 'Cargo.toml'), 'utf-8')).toContain(`version = "${nextVersion}"`) + expect(JSON.parse(readFileSync(join(rootDir, 'gui', 'src-tauri', 'tauri.conf.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(stagedFiles).toEqual(new Set([ + 'Cargo.toml', + 'cli-crate/Cargo.toml', + 'cli/npm/darwin-arm64/package.json', + 'cli/package.json', + 'gui/src-tauri/tauri.conf.json', + 'libraries/logger/package.json', + 'package.json', + 'sdk/package.json' + ])) + }) + + it('accepts sdk/package.json as a staged version source and propagates it', () => { + const rootDir = createFixtureRepo() + tempDirs.push(rootDir) + + const nextVersion = '2026.10324.10316' + writeJson(join(rootDir, 'sdk', 'package.json'), { + name: '@truenine/memory-sync-sdk', + version: nextVersion, + private: true + }) + runGit(rootDir, ['add', 'sdk/package.json']) + + const result = runSyncVersions({rootDir}) + const stagedFiles = new Set(runGit(rootDir, ['diff', '--cached', '--name-only']).split(/\r?\n/).filter(Boolean)) + + expect(result.targetVersion).toBe(nextVersion) + expect(result.versionSource).toBe('sdk/package.json') + expect(JSON.parse(readFileSync(join(rootDir, 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(JSON.parse(readFileSync(join(rootDir, 'cli', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(JSON.parse(readFileSync(join(rootDir, 'sdk', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) expect(JSON.parse(readFileSync(join(rootDir, 'libraries', 'logger', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) expect(readFileSync(join(rootDir, 'Cargo.toml'), 'utf-8')).toContain(`version = "${nextVersion}"`) expect(readFileSync(join(rootDir, 'cli-crate', 'Cargo.toml'), 'utf-8')).toContain(`version = "${nextVersion}"`) @@ -112,7 +154,8 @@ describe('sync-versions hook', () => { 'cli/package.json', 'gui/src-tauri/tauri.conf.json', 'libraries/logger/package.json', - 'package.json' + 'package.json', + 'sdk/package.json' ])) }) diff --git a/Cargo.lock b/Cargo.lock index 601b13e0..0eedf855 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4456,6 +4456,15 @@ dependencies = [ "walkdir", ] +[[package]] +name = "tnmsc-cli-shell" +version = "2026.10330.118" +dependencies = [ + "clap", + "tnmsc", + "tnmsc-logger", +] + [[package]] name = "tnmsc-logger" version = "2026.10330.118" diff --git a/Cargo.toml b/Cargo.toml index 6cc71d83..78978a84 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,7 @@ [workspace] resolver = "2" members = [ + "sdk", "cli", "libraries/logger", "libraries/md-compiler", @@ -18,7 +19,7 @@ repository = "https://github.com/TrueNine/memory-sync" [workspace.dependencies] # Internal crates -tnmsc = { path = "cli" } +tnmsc = { path = "sdk" } tnmsc-logger = { path = "libraries/logger" } tnmsc-md-compiler = { path = "libraries/md-compiler" } tnmsc-script-runtime = { path = "libraries/script-runtime" } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 167b8017..14fed2c5 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -327,54 +327,36 @@ importers: specifier: 'catalog:' version: 4.3.6 devDependencies: - '@clack/prompts': + '@truenine/eslint10-config': specifier: 'catalog:' - version: 1.1.0 - '@truenine/logger': - specifier: workspace:* - version: link:../libraries/logger - '@truenine/md-compiler': - specifier: workspace:* - version: link:../libraries/md-compiler - '@truenine/script-runtime': + version: 2026.10326.11646(3147283cf2a5f1693493984073c80bb9) + '@truenine/memory-sync-sdk': specifier: workspace:* - version: link:../libraries/script-runtime - '@types/fs-extra': - specifier: 'catalog:' - version: 11.0.4 - '@types/picomatch': + version: link:../sdk + '@types/node': specifier: 'catalog:' - version: 4.0.2 + version: 25.5.0 '@vitest/coverage-v8': specifier: 'catalog:' version: 4.1.2(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))) - fast-glob: - specifier: 'catalog:' - version: 3.3.3 - fs-extra: - specifier: 'catalog:' - version: 11.3.4 - jiti: - specifier: 'catalog:' - version: 2.6.1 - lightningcss: + eslint: specifier: 'catalog:' - version: 1.32.0 - picocolors: + version: 10.1.0(jiti@2.6.1) + npm-run-all2: specifier: 'catalog:' - version: 1.1.1 - picomatch: + version: 8.0.4 + tsdown: specifier: 'catalog:' - version: 4.0.4 + version: 0.21.7(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(synckit@0.11.12)(typescript@6.0.2) tsx: specifier: 'catalog:' version: 4.21.0 + typescript: + specifier: 'catalog:' + version: 6.0.2 vitest: specifier: 'catalog:' version: 4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) - zod-to-json-schema: - specifier: 'catalog:' - version: 3.25.2(zod@4.3.6) optionalDependencies: '@truenine/memory-sync-cli-darwin-arm64': specifier: workspace:* @@ -642,9 +624,9 @@ importers: '@modelcontextprotocol/sdk': specifier: 'catalog:' version: 1.28.0(zod@4.3.6) - '@truenine/memory-sync-cli': + '@truenine/memory-sync-sdk': specifier: workspace:* - version: link:../cli + version: link:../sdk zod: specifier: 'catalog:' version: 4.3.6 @@ -674,6 +656,67 @@ importers: specifier: 'catalog:' version: 4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) + sdk: + dependencies: + json5: + specifier: 'catalog:' + version: 2.2.3 + yaml: + specifier: 'catalog:' + version: 2.8.3 + zod: + specifier: 'catalog:' + version: 4.3.6 + devDependencies: + '@clack/prompts': + specifier: 'catalog:' + version: 1.1.0 + '@truenine/logger': + specifier: workspace:* + version: link:../libraries/logger + '@truenine/md-compiler': + specifier: workspace:* + version: link:../libraries/md-compiler + '@truenine/script-runtime': + specifier: workspace:* + version: link:../libraries/script-runtime + '@types/fs-extra': + specifier: 'catalog:' + version: 11.0.4 + '@types/picomatch': + specifier: 'catalog:' + version: 4.0.2 + '@vitest/coverage-v8': + specifier: 'catalog:' + version: 4.1.2(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))) + fast-glob: + specifier: 'catalog:' + version: 3.3.3 + fs-extra: + specifier: 'catalog:' + version: 11.3.4 + jiti: + specifier: 'catalog:' + version: 2.6.1 + lightningcss: + specifier: 'catalog:' + version: 1.32.0 + picocolors: + specifier: 'catalog:' + version: 1.1.1 + picomatch: + specifier: 'catalog:' + version: 4.0.4 + tsx: + specifier: 'catalog:' + version: 4.21.0 + vitest: + specifier: 'catalog:' + version: 4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) + zod-to-json-schema: + specifier: 'catalog:' + version: 3.25.2(zod@4.3.6) + packages: '@antfu/eslint-config@7.7.3': @@ -8095,8 +8138,8 @@ snapshots: '@typescript-eslint/project-service@8.56.1(typescript@6.0.2)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.56.1(typescript@6.0.2) - '@typescript-eslint/types': 8.56.1 + '@typescript-eslint/tsconfig-utils': 8.57.2(typescript@6.0.2) + '@typescript-eslint/types': 8.57.2 debug: 4.4.3 typescript: 6.0.2 transitivePeerDependencies: diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index f11b11b2..4e0bcfbe 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,4 +1,5 @@ packages: + - sdk - cli - cli/npm/* - mcp diff --git a/turbo.json b/turbo.json index 4c77dbc3..f84f8440 100644 --- a/turbo.json +++ b/turbo.json @@ -17,7 +17,7 @@ "outputs": [] }, "typecheck": { - "dependsOn": ["build", "^build"], + "dependsOn": ["^build"], "outputs": [] } } From 3185066758a65bb817f35ed31326873cafa5eb5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:28:44 +0800 Subject: [PATCH 02/27] feat: enhance documentation and structure for SDK integration and responsibilities --- doc/content/_meta.ts | 3 + doc/content/cli/plugin-config.mdx | 2 +- doc/content/gui/index.mdx | 4 +- doc/content/index.mdx | 10 ++-- doc/content/mcp/index.mdx | 4 +- doc/content/sdk/_meta.ts | 3 + doc/content/sdk/index.mdx | 56 +++++++++++++++++++ .../technical-details/architecture.mdx | 11 +++- doc/lib/site.ts | 7 ++- 9 files changed, 87 insertions(+), 13 deletions(-) create mode 100644 doc/content/sdk/_meta.ts create mode 100644 doc/content/sdk/index.mdx diff --git a/doc/content/_meta.ts b/doc/content/_meta.ts index e9e73e88..cb49373f 100644 --- a/doc/content/_meta.ts +++ b/doc/content/_meta.ts @@ -5,6 +5,9 @@ export default { 'cli': { title: 'CLI' }, + 'sdk': { + title: 'SDK' + }, 'mcp': { title: 'MCP' }, diff --git a/doc/content/cli/plugin-config.mdx b/doc/content/cli/plugin-config.mdx index 707a9291..1e0bf530 100644 --- a/doc/content/cli/plugin-config.mdx +++ b/doc/content/cli/plugin-config.mdx @@ -19,7 +19,7 @@ status: stable ## 当前默认输出插件 -当前仓库 `cli/src/plugin.config.ts` 默认装配的输出插件包括: +当前仓库 `sdk/src/plugin.config.ts` 默认装配的输出插件包括: - `AgentsOutputPlugin` - `ClaudeCodeCLIOutputPlugin` diff --git a/doc/content/gui/index.mdx b/doc/content/gui/index.mdx index b386fe22..53cf36ae 100644 --- a/doc/content/gui/index.mdx +++ b/doc/content/gui/index.mdx @@ -1,13 +1,13 @@ --- title: GUI -description: 说明 Tauri 桌面层的职责、边界,以及它与 tnmsc crate / CLI 的关系。 +description: 说明 Tauri 桌面层的职责、边界,以及它与 sdk / tnmsc crate / CLI 的关系。 sidebarTitle: 概览 status: stable --- # GUI -`gui/` 是基于 Tauri + React 的桌面调用层。它的角色不是成为系统架构中心,而是把 `tnmsc` 的配置编辑、执行、展示与日志观察做成桌面工作流。 +`gui/` 是基于 Tauri + React 的桌面调用层。它的角色不是成为系统架构中心,而是把 `sdk/` 中 `tnmsc` crate 暴露的配置编辑、执行、展示与日志观察做成桌面工作流。 ## 这层负责什么 diff --git a/doc/content/index.mdx b/doc/content/index.mdx index 37e8564f..caf1eb2f 100644 --- a/doc/content/index.mdx +++ b/doc/content/index.mdx @@ -1,6 +1,6 @@ --- title: overview -description: memory-sync 文档入口,按 CLI、MCP、GUI、技术细节与设计初衷五个一级门类组织。 +description: memory-sync 文档入口,按 CLI、SDK、MCP、GUI、技术细节与设计初衷六个一级门类组织。 sidebarTitle: overview status: stable keywords: @@ -11,21 +11,23 @@ keywords: # 文档总览 -这套文档站围绕当前仓库事实组织,一级结构固定为 `CLI`、`MCP`、`GUI`、`技术细节`、`设计初衷`。这样写文档时,面向用户的操作入口、面向集成的 server、桌面层说明、实现原理和背景动机不再混在一起。 +这套文档站围绕当前仓库事实组织,一级结构固定为 `CLI`、`SDK`、`MCP`、`GUI`、`技术细节`、`设计初衷`。这样写文档时,面向用户的操作入口、repo 内部 mixed core、面向集成的 server、桌面层说明、实现原理和背景动机不再混在一起。 -## 五个一级门类 +## 六个一级门类 | 门类 | 主要回答的问题 | 入口 | | --- | --- | --- | | CLI | 如何安装、准备项目、执行同步、理解命令与配置字段 | [CLI](/docs/cli) | +| SDK | `sdk/` 为什么是 mixed core、拥有哪些能力、内部消费者该如何依赖它 | [SDK](/docs/sdk) | | MCP | `memory-sync-mcp` 是什么、暴露了哪些工具、适合怎么接入 | [MCP](/docs/mcp) | -| GUI | 桌面层负责什么、有哪些页面、它与 `tnmsc` crate / CLI 如何配合 | [GUI](/docs/gui) | +| GUI | 桌面层负责什么、有哪些页面、它与 `sdk/` / `tnmsc` crate / CLI 如何配合 | [GUI](/docs/gui) | | 技术细节 | 架构边界、同步管线、真源模型,以及 prompts / skills / commands / rules 等输入资产如何组织 | [技术细节](/docs/technical-details) | | 设计初衷 | 为什么要做这个项目,为什么文档也要这样分层 | [设计初衷](/docs/design-rationale) | ## 从哪里开始 - 第一次使用 `memory-sync`,从 [CLI](/docs/cli) 开始,先把安装、项目准备和第一次同步跑通。 +- 需要理解 repo 内部核心层怎么分工、为什么以后统一复用 `sdk/`,先看 [SDK](/docs/sdk)。 - 需要把 `memory-sync-mcp` 接入到支持 MCP 的宿主里,直接进 [MCP](/docs/mcp)。 - 关注桌面应用而不是终端入口时,查看 [GUI](/docs/gui)。 - 需要理解 Rust-first / NAPI-first、真源模型和输入资产职责时,进入 [技术细节](/docs/technical-details)。 diff --git a/doc/content/mcp/index.mdx b/doc/content/mcp/index.mdx index 38df0ecc..d75a160b 100644 --- a/doc/content/mcp/index.mdx +++ b/doc/content/mcp/index.mdx @@ -1,6 +1,6 @@ --- title: MCP -description: 说明 memory-sync-mcp 的定位、运行方式,以及它与 CLI prompt service 的关系。 +description: 说明 memory-sync-mcp 的定位、运行方式,以及它与 sdk prompt service 的关系。 sidebarTitle: 概览 status: stable --- @@ -12,7 +12,7 @@ status: stable ## 它负责什么 - 作为 MCP stdio server 暴露 `memory-sync` 的 prompt 管理能力 -- 复用 `@truenine/memory-sync-cli` 导出的 prompt service,而不是重新实现一套独立逻辑 +- 复用 `@truenine/memory-sync-sdk` 导出的 prompt service,而不是重新实现一套独立逻辑 - 让支持 MCP 的宿主按工具调用方式读取、更新和写回 prompt 资产 ## 它不负责什么 diff --git a/doc/content/sdk/_meta.ts b/doc/content/sdk/_meta.ts new file mode 100644 index 00000000..be04a93e --- /dev/null +++ b/doc/content/sdk/_meta.ts @@ -0,0 +1,3 @@ +export default { + 'index': '概览' +} diff --git a/doc/content/sdk/index.mdx b/doc/content/sdk/index.mdx new file mode 100644 index 00000000..39744afb --- /dev/null +++ b/doc/content/sdk/index.mdx @@ -0,0 +1,56 @@ +--- +title: SDK +description: 说明 sdk/ 作为 private mixed core 的职责、边界、消费方向与对外身份保持策略。 +sidebarTitle: 概览 +status: stable +--- + +# SDK + +`sdk/` 是当前仓库里的 private mixed core,也是 repo 内部共享能力的单一入口。它不是一个“给外部用户单独安装的 npm 包”,而是把原先放在 `cli/` 里的核心实现收拢后的真源层。 + +## 这层负责什么 + +- 承载私有 npm 包 `@truenine/memory-sync-sdk` +- 承载 Rust crate `tnmsc` 的实际工作区路径 +- 持有 TypeScript 同步管线、prompt service、schema 生成与 `plugin-runtime` +- 持有 Rust library、NAPI 构建、Node bridge runtime 与嵌入式运行时逻辑 +- 作为 `mcp/`、`gui/` 以及未来 repo 内部消费者的默认依赖点 + +## 这层不负责什么 + +- 不直接承担公开 npm CLI 的发布入口 +- 不承担平台 shim 包 `@truenine/memory-sync-cli-` 的分发身份 +- 不把 repo 内部共享 API 再挂回 `cli/` 做二次真源 + +## 身份保持不变的部分 + +这次分层调整没有改动这些公开身份: + +- CLI binary 仍然是 `tnmsc` +- 公共 npm CLI 包仍然是 `@truenine/memory-sync-cli` +- 公共平台包仍然是 `@truenine/memory-sync-cli-` +- Rust crate 名仍然是 `tnmsc` + +变化的是“路径与所有权”,不是这些对外身份本身。 + +## 消费方向 + +| 消费方 | 依赖方式 | +| --- | --- | +| `cli/` | 作为 shell 与兼容层,薄封装 `sdk/` 导出的能力 | +| `mcp/` | 直接导入 `@truenine/memory-sync-sdk` 的 prompt service | +| `gui/src-tauri` | 继续依赖 crate `tnmsc`,但 crate 实际路径位于 `sdk/` | + +## 边界规则 + +- repo 内部新代码不应再把 `cli/` 当成默认共享 API 入口 +- `cli/` 只保留命令入口、兼容导出与发布打包职责 +- 需要解释实现边界时,先看 `sdk/`,再看 `cli/`、`mcp/` 或 `gui/` + +## 推荐阅读 + +- [技术细节 / 架构边界](/docs/technical-details/architecture):查看 `sdk/`、`cli/`、`mcp/`、`gui/` 的整体分层。 +- [CLI](/docs/cli):查看公开命令入口、安装与兼容发布表面。 +- [MCP](/docs/mcp):查看 `sdk` prompt service 如何被 stdio server 消费。 +- [GUI](/docs/gui):查看桌面层如何调用 `sdk/` 中的 `tnmsc` crate。 diff --git a/doc/content/technical-details/architecture.mdx b/doc/content/technical-details/architecture.mdx index 124326a3..719c0f99 100644 --- a/doc/content/technical-details/architecture.mdx +++ b/doc/content/technical-details/architecture.mdx @@ -9,7 +9,8 @@ status: stable 当前仓库的核心方向不是“继续堆更多纯 TypeScript 兼容层”,而是: -- `cli/` 作为公开入口与编排层 +- `sdk/` 作为私有 mixed core +- `cli/` 作为公开入口与兼容发布层 - Rust crate / NAPI 作为长期核心实现重心 - TypeScript 主要负责接口暴露、配置装配、桥接运行时与声明描述 @@ -17,8 +18,9 @@ status: stable | 组件 | 职责 | | --- | --- | -| `cli/` | `tnmsc` 命令入口、crate 暴露、npm 包暴露 | -| `mcp/` | MCP stdio server,复用 CLI prompt service | +| `sdk/` | `tnmsc` crate、Node bridge runtime、prompt service、schema 与 NAPI 真源 | +| `cli/` | `tnmsc` 命令入口、公开 npm CLI 包、兼容发布壳 | +| `mcp/` | MCP stdio server,复用 sdk prompt service | | `gui/` | Tauri 桌面调用层与展示层 | | `libraries/` | Rust-first / NAPI-first 基础库 | @@ -26,6 +28,9 @@ status: stable - GUI 不是核心实现中心 - MCP 不是新的真源模型 +- CLI 不是 repo 内部共享 API 的默认依赖点 - 文档站不是架构真相的唯一来源 如果你需要看用户操作面,回到 [CLI](/docs/cli) 或 [GUI](/docs/gui);这里只说明实现分层为什么这样划。 + +如果你需要专门看 `sdk/` 这层本身的职责、消费方向与身份保持策略,直接进入 [SDK](/docs/sdk)。 diff --git a/doc/lib/site.ts b/doc/lib/site.ts index 85c78e6d..431e43c7 100644 --- a/doc/lib/site.ts +++ b/doc/lib/site.ts @@ -36,6 +36,11 @@ export const homeEntryCards = [ title: 'CLI', detail: '围绕安装、项目准备、第一次同步、配置字段与命令表面组织。' }, + { + href: '/docs/sdk', + title: 'SDK', + detail: '单独说明 private mixed core 的职责边界、消费方向,以及它与 tnmsc crate / cli shell 的关系。' + }, { href: '/docs/mcp', title: 'MCP', @@ -44,7 +49,7 @@ export const homeEntryCards = [ { href: '/docs/gui', title: 'GUI', - detail: '单独查看桌面层的职责、页面结构,以及它与 tnmsc crate / CLI 的关系。' + detail: '单独查看桌面层的职责、页面结构,以及它与 sdk / tnmsc crate / CLI 的关系。' }, { href: '/docs/technical-details', From 21f45231f286290edb5035cb340a542a5db07915 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:28:58 +0800 Subject: [PATCH 03/27] feat: replace 'cli' with 'sdk' in native module configurations --- scripts/build-native.ts | 2 +- scripts/copy-napi.ts | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/build-native.ts b/scripts/build-native.ts index 97911b5c..dab0fa69 100644 --- a/scripts/build-native.ts +++ b/scripts/build-native.ts @@ -10,7 +10,7 @@ const NATIVE_MODULES = [ {name: 'logger', dir: 'libraries/logger'}, {name: 'md-compiler', dir: 'libraries/md-compiler'}, {name: 'script-runtime', dir: 'libraries/script-runtime'}, - {name: 'cli', dir: 'cli'}, + {name: 'sdk', dir: 'sdk'}, ] as const const __dirname = import.meta.dirname ?? dirname(fileURLToPath(import.meta.url)) diff --git a/scripts/copy-napi.ts b/scripts/copy-napi.ts index f6cdc168..b0efaddc 100644 --- a/scripts/copy-napi.ts +++ b/scripts/copy-napi.ts @@ -8,7 +8,7 @@ const NATIVE_MODULES = [ {name: 'logger', distDir: 'libraries/logger/dist'}, {name: 'md-compiler', distDir: 'libraries/md-compiler/dist'}, {name: 'script-runtime', distDir: 'libraries/script-runtime/dist'}, - {name: 'cli', distDir: 'cli/dist'}, + {name: 'sdk', distDir: 'sdk/dist'}, ] as const const PLATFORM_MAP: Record = { @@ -113,5 +113,5 @@ if (copied > 0) { console.warn('[copy-napi] No .node files found. Build napi first:') console.warn(' pnpm -F @truenine/logger run build:native') console.warn(' pnpm -F @truenine/md-compiler run build:native') - console.warn(' pnpm -C cli exec napi build --platform --release --output-dir dist -- --features napi') + console.warn(' pnpm -F @truenine/memory-sync-sdk run build:native') } From 265cdae53b50d0034eb6b400f99f6f2cdf7e0c4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:29:28 +0800 Subject: [PATCH 04/27] feat: add initial SDK configuration files and build script --- sdk/.npmignore | 5 +++++ sdk/Cargo.toml | 41 +++++++++++++++++++++++++++++++++++++++++ sdk/build.rs | 47 +++++++++++++++++++++++++++++++++++++++++++++++ sdk/env.d.ts | 22 ++++++++++++++++++++++ 4 files changed, 115 insertions(+) create mode 100644 sdk/.npmignore create mode 100644 sdk/Cargo.toml create mode 100644 sdk/build.rs create mode 100644 sdk/env.d.ts diff --git a/sdk/.npmignore b/sdk/.npmignore new file mode 100644 index 00000000..02bb2142 --- /dev/null +++ b/sdk/.npmignore @@ -0,0 +1,5 @@ +* +!dist +!dist/ +!package.json +!README.md diff --git a/sdk/Cargo.toml b/sdk/Cargo.toml new file mode 100644 index 00000000..2b9d4e84 --- /dev/null +++ b/sdk/Cargo.toml @@ -0,0 +1,41 @@ +[package] +name = "tnmsc" +description = "Cross-AI-tool prompt synchronisation CLI" +version.workspace = true +edition.workspace = true +rust-version.workspace = true +license.workspace = true +authors.workspace = true +repository.workspace = true + +[lib] +name = "tnmsc" +path = "src/lib.rs" +crate-type = ["rlib", "cdylib"] + +[features] +default = [] +embedded-runtime = [] +napi = ["dep:napi", "dep:napi-derive"] + +[dependencies] +tnmsc-logger = { workspace = true } +tnmsc-md-compiler = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +thiserror = "2.0.18" +clap = { workspace = true } +dirs = { workspace = true } +sha2 = { workspace = true } +napi = { workspace = true, optional = true } +napi-derive = { workspace = true, optional = true } +reqwest = { version = "0.13.2", default-features = false, features = ["blocking", "json", "rustls"] } +globset = "0.4.18" +walkdir = "2.5.0" + +[dev-dependencies] +proptest = "1.11.0" +tempfile = "3.27.0" + +[build-dependencies] +napi-build = { workspace = true } diff --git a/sdk/build.rs b/sdk/build.rs new file mode 100644 index 00000000..807864da --- /dev/null +++ b/sdk/build.rs @@ -0,0 +1,47 @@ +use std::env; +use std::fs; +use std::path::{Path, PathBuf}; + +fn main() { + #[cfg(feature = "napi")] + napi_build::setup(); + + // Check if embedded-runtime feature is enabled via CARGO_FEATURE_* env var + // Note: #[cfg(feature = ...)] doesn't work in build.rs at runtime, + // we must check the environment variable set by cargo + if env::var("CARGO_FEATURE_EMBEDDED_RUNTIME").is_ok() { + let out_dir = env::var("OUT_DIR").expect("OUT_DIR not set"); + let dest = Path::new(&out_dir).join("plugin-runtime.mjs"); + + // Try multiple possible locations for plugin-runtime.mjs + let possible_sources = vec![ + // Already built in sdk/dist + PathBuf::from("dist/plugin-runtime.mjs"), + // From repo root + PathBuf::from("sdk/dist/plugin-runtime.mjs"), + // CI workspace path (when building from repo root) + PathBuf::from("../sdk/dist/plugin-runtime.mjs"), + ]; + + let mut found = false; + for src in &possible_sources { + if src.exists() { + fs::copy(src, &dest).expect("Failed to copy plugin-runtime.mjs"); + println!("cargo:rerun-if-changed={}", src.display()); + found = true; + break; + } + } + + if !found { + panic!( + "plugin-runtime.mjs not found for embedded-runtime feature. \ + Please build it first with: pnpm -F @truenine/memory-sync-sdk exec tsdown \ + Searched paths: {:?}", + possible_sources + ); + } + + println!("cargo:rerun-if-changed=build.rs"); + } +} diff --git a/sdk/env.d.ts b/sdk/env.d.ts new file mode 100644 index 00000000..df4bb8c8 --- /dev/null +++ b/sdk/env.d.ts @@ -0,0 +1,22 @@ +/// +/// +/// + +/** + * CLI version injected at build time from package.json + */ +declare const __CLI_VERSION__: string + +/** + * CLI package name injected at build time from package.json + */ +declare const __CLI_PACKAGE_NAME__: string + +/** + * Kiro global powers registry JSON string injected at build time + */ +declare const __KIRO_GLOBAL_POWERS_REGISTRY__: string + +interface GlobalThis { + __TNMSC_TEST_NATIVE_BINDING__?: object +} From 274de3ae1f21d28200131a8d88f5e8e70d1c7f33 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:30:00 +0800 Subject: [PATCH 05/27] feat: add initial SDK configuration files including eslint, tsconfig, and build scripts --- sdk/eslint.config.ts | 45 +++++++++++++ sdk/package.json | 95 ++++++++++++++++++++++++++ sdk/tsconfig.eslint.json | 20 ++++++ sdk/tsconfig.json | 82 ++++++++++++++++++++++ sdk/tsconfig.lib.json | 22 ++++++ sdk/tsconfig.test.json | 26 +++++++ sdk/tsdown.config.ts | 142 +++++++++++++++++++++++++++++++++++++++ sdk/vite.config.ts | 75 +++++++++++++++++++++ sdk/vitest.config.ts | 34 ++++++++++ 9 files changed, 541 insertions(+) create mode 100644 sdk/eslint.config.ts create mode 100644 sdk/package.json create mode 100644 sdk/tsconfig.eslint.json create mode 100644 sdk/tsconfig.json create mode 100644 sdk/tsconfig.lib.json create mode 100644 sdk/tsconfig.test.json create mode 100644 sdk/tsdown.config.ts create mode 100644 sdk/vite.config.ts create mode 100644 sdk/vitest.config.ts diff --git a/sdk/eslint.config.ts b/sdk/eslint.config.ts new file mode 100644 index 00000000..9c891393 --- /dev/null +++ b/sdk/eslint.config.ts @@ -0,0 +1,45 @@ +import {dirname, resolve} from 'node:path' +import {fileURLToPath} from 'node:url' + +import eslint10 from '@truenine/eslint10-config' + +const configDir = dirname(fileURLToPath(import.meta.url)) + +const config = await eslint10({ + type: 'lib', + typescript: { + strictTypescriptEslint: true, + tsconfigPath: resolve(configDir, 'tsconfig.eslint.json'), + parserOptions: { + allowDefaultProject: ['*.config.ts', 'test/**/*.ts'] + } + }, + ignores: [ + '.turbo/**', + 'aindex/**', + 'npm/**/noop.cjs', + 'npm/**/noop.d.ts', + '*.md', + '**/*.md', + '*.toml', + '**/*.toml', + '.kiro/**', + '.claude/**', + '.factory/**', + 'src/AGENTS.md', + '.skills/**', + '**/.skills/**', + '.agent/**', + 'scripts/**' + ] +}) + +const overrides = { + files: ['src/**/*.ts', 'src/**/*.tsx'], + rules: { + 'e18e/prefer-static-regex': 'off', + 'ts/member-ordering': 'off' + } +} + +export default [...config, overrides] as unknown diff --git a/sdk/package.json b/sdk/package.json new file mode 100644 index 00000000..3c4eda81 --- /dev/null +++ b/sdk/package.json @@ -0,0 +1,95 @@ +{ + "name": "@truenine/memory-sync-sdk", + "type": "module", + "version": "2026.10330.118", + "private": true, + "description": "TrueNine Memory Synchronization SDK", + "author": "TrueNine", + "license": "AGPL-3.0-only", + "homepage": "https://github.com/TrueNine/memory-sync", + "repository": { + "type": "git", + "url": "git+https://github.com/TrueNine/memory-sync.git", + "directory": "sdk" + }, + "exports": { + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs" + }, + "./globals": { + "types": "./dist/globals.d.mts", + "import": "./dist/globals.mjs" + }, + "./schema.json": "./dist/tnmsc.schema.json", + "./package.json": "./package.json" + }, + "main": "./dist/index.mjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "files": [ + "dist", + "dist/tnmsc.schema.json" + ], + "napi": { + "binaryName": "napi-memory-sync-cli", + "targets": [ + "x86_64-pc-windows-msvc", + "x86_64-unknown-linux-gnu", + "aarch64-unknown-linux-gnu", + "aarch64-apple-darwin", + "x86_64-apple-darwin" + ] + }, + "scripts": { + "build": "run-s build:deps build:napi bundle finalize:bundle generate:schema", + "build:napi": "run-s build:native build:napi:copy", + "build:napi:copy": "tsx ../scripts/copy-napi.ts", + "build:native": "napi build --platform --release --output-dir dist -- --features napi", + "build:deps": "run-s build:deps:runtime build:deps:md-compiler", + "build:deps:runtime": "pnpm -F @truenine/logger -F @truenine/script-runtime run build", + "build:deps:md-compiler": "run-s build:deps:md-compiler:ts build:deps:md-compiler:native", + "build:deps:md-compiler:ts": "pnpm -F @truenine/md-compiler run build:ts", + "build:deps:md-compiler:native": "pnpm -F @truenine/md-compiler run build:native", + "build:deps:ts": "pnpm -F @truenine/logger -F @truenine/md-compiler -F @truenine/script-runtime run build:ts", + "bundle": "tsx ../scripts/build-quiet.ts", + "check": "run-s build:deps:ts check:run", + "check:run": "run-p lint:run typecheck:run", + "finalize:bundle": "tsx scripts/finalize-bundle.ts", + "generate:schema": "tsx scripts/generate-schema.ts", + "lint": "run-s build:deps:ts lint:run", + "lint:run": "eslint --cache --cache-location node_modules/.cache/.eslintcache .", + "prepublishOnly": "run-s build check", + "test": "run-s build:deps test:run", + "test:native-cleanup-smoke": "tsx scripts/cleanup-native-smoke.ts", + "test:run": "vitest run", + "benchmark:cleanup": "tsx scripts/benchmark-cleanup.ts", + "lintfix": "run-s build:deps:ts lintfix:run", + "lintfix:run": "eslint --fix --cache --cache-location node_modules/.cache/.eslintcache .", + "typecheck": "run-s build:deps:ts typecheck:run", + "typecheck:run": "tsc --noEmit -p tsconfig.lib.json" + }, + "dependencies": { + "json5": "catalog:", + "yaml": "catalog:", + "zod": "catalog:" + }, + "devDependencies": { + "@clack/prompts": "catalog:", + "@truenine/logger": "workspace:*", + "@truenine/md-compiler": "workspace:*", + "@truenine/script-runtime": "workspace:*", + "@types/fs-extra": "catalog:", + "@types/picomatch": "catalog:", + "@vitest/coverage-v8": "catalog:", + "fast-glob": "catalog:", + "fs-extra": "catalog:", + "jiti": "catalog:", + "lightningcss": "catalog:", + "picocolors": "catalog:", + "picomatch": "catalog:", + "tsx": "catalog:", + "vitest": "catalog:", + "zod-to-json-schema": "catalog:" + } +} diff --git a/sdk/tsconfig.eslint.json b/sdk/tsconfig.eslint.json new file mode 100644 index 00000000..62f8268e --- /dev/null +++ b/sdk/tsconfig.eslint.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://json.schemastore.org/tsconfig", + "extends": "./tsconfig.json", + "compilerOptions": { + "noEmit": true, + "skipLibCheck": true + }, + "include": [ + "src/**/*.ts", + "src/**/*.test.ts", + "src/**/*.spec.ts", + "test/**/*.ts", + "env.d.ts", + "eslint.config.ts", + "tsdown.config.ts", + "vite.config.ts", + "vitest.config.ts" + ], + "exclude": ["../node_modules", "dist", "coverage"] +} diff --git a/sdk/tsconfig.json b/sdk/tsconfig.json new file mode 100644 index 00000000..9006c87e --- /dev/null +++ b/sdk/tsconfig.json @@ -0,0 +1,82 @@ +{ + "$schema": "https://json.schemastore.org/tsconfig", + "compilerOptions": { + "noUncheckedSideEffectImports": true, + "incremental": true, + "composite": false, + "target": "ESNext", + "lib": ["ESNext"], + "moduleDetection": "force", + "useDefineForClassFields": true, + "module": "ESNext", + "moduleResolution": "Bundler", + "paths": { + "@/*": ["./src/*"], + "@truenine/desk-paths": ["./src/core/desk-paths.ts"], + "@truenine/desk-paths/*": ["./src/core/desk-paths/*"], + "@truenine/plugin-output-shared": ["./src/plugins/plugin-output-shared/index.ts"], + "@truenine/plugin-output-shared/*": ["./src/plugins/plugin-output-shared/*"], + "@truenine/plugin-input-shared": ["./src/plugins/plugin-input-shared/index.ts"], + "@truenine/plugin-input-shared/*": ["./src/plugins/plugin-input-shared/*"], + "@truenine/plugin-agentskills-compact": ["./src/plugins/plugin-agentskills-compact.ts"], + "@truenine/plugin-agentsmd": ["./src/plugins/plugin-agentsmd.ts"], + "@truenine/plugin-antigravity": ["./src/plugins/plugin-antigravity/index.ts"], + "@truenine/plugin-claude-code-cli": ["./src/plugins/plugin-claude-code-cli.ts"], + "@truenine/plugin-cursor": ["./src/plugins/plugin-cursor.ts"], + "@truenine/plugin-droid-cli": ["./src/plugins/plugin-droid-cli.ts"], + "@truenine/plugin-editorconfig": ["./src/plugins/plugin-editorconfig.ts"], + "@truenine/plugin-gemini-cli": ["./src/plugins/plugin-gemini-cli.ts"], + "@truenine/plugin-git-exclude": ["./src/plugins/plugin-git-exclude.ts"], + "@truenine/plugin-jetbrains-ai-codex": ["./src/plugins/plugin-jetbrains-ai-codex.ts"], + "@truenine/plugin-jetbrains-codestyle": ["./src/plugins/plugin-jetbrains-codestyle.ts"], + "@truenine/plugin-openai-codex-cli": ["./src/plugins/plugin-openai-codex-cli.ts"], + "@truenine/plugin-opencode-cli": ["./src/plugins/plugin-opencode-cli.ts"], + "@truenine/plugin-qoder-ide": ["./src/plugins/plugin-qoder-ide.ts"], + "@truenine/plugin-readme": ["./src/plugins/plugin-readme.ts"], + "@truenine/plugin-trae-ide": ["./src/plugins/plugin-trae-ide.ts"], + "@truenine/plugin-vscode": ["./src/plugins/plugin-vscode.ts"], + "@truenine/plugin-warp-ide": ["./src/plugins/plugin-warp-ide.ts"], + "@truenine/plugin-windsurf": ["./src/plugins/plugin-windsurf.ts"], + "@truenine/plugin-zed": ["./src/plugins/plugin-zed.ts"] + }, + "resolveJsonModule": true, + "types": ["node"], + "allowImportingTsExtensions": true, + "strict": true, + "strictBindCallApply": true, + "strictFunctionTypes": true, + "strictNullChecks": true, + "strictPropertyInitialization": true, + "allowUnreachableCode": false, + "allowUnusedLabels": false, + "alwaysStrict": true, + "exactOptionalPropertyTypes": true, + "noFallthroughCasesInSwitch": true, + "noImplicitAny": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noImplicitThis": true, + "noPropertyAccessFromIndexSignature": true, + "noUncheckedIndexedAccess": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "useUnknownInCatchVariables": true, + "declaration": true, + "declarationMap": true, + "importHelpers": true, + "newLine": "lf", + "noEmit": true, + "noEmitHelpers": false, + "removeComments": false, + "sourceMap": true, + "stripInternal": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "isolatedModules": true, + "verbatimModuleSyntax": true, + "skipLibCheck": true + }, + "include": ["src/**/*", "test/**/*.ts", "env.d.ts", "eslint.config.ts", "tsdown.config.ts", "vite.config.ts", "vitest.config.ts"], + "exclude": ["../node_modules", "dist"] +} diff --git a/sdk/tsconfig.lib.json b/sdk/tsconfig.lib.json new file mode 100644 index 00000000..5597f4de --- /dev/null +++ b/sdk/tsconfig.lib.json @@ -0,0 +1,22 @@ +{ + "$schema": "https://json.schemastore.org/tsconfig", + "extends": "./tsconfig.json", + "compilerOptions": { + "composite": true, + "rootDir": ".", + "noEmit": false, + "outDir": "../dist", + "skipLibCheck": true + }, + "include": [ + "src/**/*", + "env.d.ts", + "tsdown.config.ts" + ], + "exclude": [ + "../node_modules", + "dist", + "**/*.spec.ts", + "**/*.test.ts" + ] +} diff --git a/sdk/tsconfig.test.json b/sdk/tsconfig.test.json new file mode 100644 index 00000000..094bf0e6 --- /dev/null +++ b/sdk/tsconfig.test.json @@ -0,0 +1,26 @@ +{ + "$schema": "https://json.schemastore.org/tsconfig", + "extends": "./tsconfig.json", + "compilerOptions": { + "lib": [ + "ESNext", + "DOM" + ], + "types": [ + "vitest/globals", + "node" + ] + }, + "include": [ + "src/**/*.spec.ts", + "src/**/*.test.ts", + "test/**/*.ts", + "vitest.config.ts", + "vite.config.ts", + "env.d.ts" + ], + "exclude": [ + "../node_modules", + "dist" + ] +} diff --git a/sdk/tsdown.config.ts b/sdk/tsdown.config.ts new file mode 100644 index 00000000..183d9c5c --- /dev/null +++ b/sdk/tsdown.config.ts @@ -0,0 +1,142 @@ +import {readFileSync} from 'node:fs' +import {resolve} from 'node:path' +import {defineConfig} from 'tsdown' + +const pkg = JSON.parse(readFileSync('./package.json', 'utf8')) as {version: string, name: string} +const kiroGlobalPowersRegistry = '{"version":"1.0.0","powers":{},"repoSources":{}}' + +const pluginAliases: Record = { + '@truenine/desk-paths': resolve('src/core/desk-paths.ts'), + '@truenine/plugin-output-shared': resolve('src/plugins/plugin-output-shared/index.ts'), + '@truenine/plugin-input-shared': resolve('src/plugins/plugin-input-shared/index.ts'), + '@truenine/plugin-agentskills-compact': resolve('src/plugins/plugin-agentskills-compact.ts'), + '@truenine/plugin-agentsmd': resolve('src/plugins/plugin-agentsmd.ts'), + '@truenine/plugin-antigravity': resolve('src/plugins/plugin-antigravity/index.ts'), + '@truenine/plugin-claude-code-cli': resolve('src/plugins/plugin-claude-code-cli.ts'), + '@truenine/plugin-cursor': resolve('src/plugins/plugin-cursor.ts'), + '@truenine/plugin-droid-cli': resolve('src/plugins/plugin-droid-cli.ts'), + '@truenine/plugin-editorconfig': resolve('src/plugins/plugin-editorconfig.ts'), + '@truenine/plugin-gemini-cli': resolve('src/plugins/plugin-gemini-cli.ts'), + '@truenine/plugin-git-exclude': resolve('src/plugins/plugin-git-exclude.ts'), + '@truenine/plugin-input-agentskills': resolve('src/plugins/plugin-input-agentskills/index.ts'), + '@truenine/plugin-input-editorconfig': resolve('src/plugins/plugin-input-editorconfig/index.ts'), + '@truenine/plugin-input-fast-command': resolve('src/plugins/plugin-input-fast-command/index.ts'), + '@truenine/plugin-input-git-exclude': resolve('src/plugins/plugin-input-git-exclude/index.ts'), + '@truenine/plugin-input-gitignore': resolve('src/plugins/plugin-input-gitignore/index.ts'), + '@truenine/plugin-input-global-memory': resolve('src/plugins/plugin-input-global-memory/index.ts'), + '@truenine/plugin-input-jetbrains-config': resolve('src/plugins/plugin-input-jetbrains-config/index.ts'), + '@truenine/plugin-input-md-cleanup-effect': resolve('src/plugins/plugin-input-md-cleanup-effect/index.ts'), + '@truenine/plugin-input-orphan-cleanup-effect': resolve('src/plugins/plugin-input-orphan-cleanup-effect/index.ts'), + '@truenine/plugin-input-project-prompt': resolve('src/plugins/plugin-input-project-prompt/index.ts'), + '@truenine/plugin-input-readme': resolve('src/plugins/plugin-input-readme/index.ts'), + '@truenine/plugin-input-rule': resolve('src/plugins/plugin-input-rule/index.ts'), + '@truenine/plugin-input-shadow-project': resolve('src/plugins/plugin-input-shadow-project/index.ts'), + '@truenine/plugin-input-shared-ignore': resolve('src/plugins/plugin-input-shared-ignore/index.ts'), + '@truenine/plugin-input-skill-sync-effect': resolve('src/plugins/plugin-input-skill-sync-effect/index.ts'), + '@truenine/plugin-input-subagent': resolve('src/plugins/plugin-input-subagent/index.ts'), + '@truenine/plugin-input-vscode-config': resolve('src/plugins/plugin-input-vscode-config/index.ts'), + '@truenine/plugin-input-workspace': resolve('src/plugins/plugin-input-workspace/index.ts'), + '@truenine/plugin-jetbrains-ai-codex': resolve('src/plugins/plugin-jetbrains-ai-codex.ts'), + '@truenine/plugin-jetbrains-codestyle': resolve('src/plugins/plugin-jetbrains-codestyle.ts'), + '@truenine/plugin-openai-codex-cli': resolve('src/plugins/plugin-openai-codex-cli.ts'), + '@truenine/plugin-opencode-cli': resolve('src/plugins/plugin-opencode-cli.ts'), + '@truenine/plugin-qoder-ide': resolve('src/plugins/plugin-qoder-ide.ts'), + '@truenine/plugin-readme': resolve('src/plugins/plugin-readme.ts'), + '@truenine/plugin-trae-ide': resolve('src/plugins/plugin-trae-ide.ts'), + '@truenine/plugin-vscode': resolve('src/plugins/plugin-vscode.ts'), + '@truenine/plugin-warp-ide': resolve('src/plugins/plugin-warp-ide.ts'), + '@truenine/plugin-windsurf': resolve('src/plugins/plugin-windsurf.ts'), + '@truenine/plugin-zed': resolve('src/plugins/plugin-zed.ts') +} + +const noExternalDeps = [ + '@truenine/logger', + '@truenine/script-runtime', + 'fast-glob', + 'jiti', + '@truenine/desk-paths', + '@truenine/md-compiler', + ...Object.keys(pluginAliases) +] + +export default defineConfig([ + { + entry: ['./src/index.ts', '!**/*.{spec,test}.*'], + platform: 'node', + sourcemap: false, + unbundle: false, + deps: { + onlyBundle: false + }, + alias: { + '@': resolve('src'), + ...pluginAliases + }, + noExternal: noExternalDeps, + format: ['esm'], + minify: true, + dts: {sourcemap: false}, + outputOptions: {exports: 'named'}, + define: { + __CLI_VERSION__: JSON.stringify(pkg.version), + __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), + __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry + } + }, + { + entry: ['./src/plugin-runtime.ts'], + platform: 'node', + sourcemap: false, + unbundle: false, + deps: { + onlyBundle: false + }, + alias: { + '@': resolve('src'), + ...pluginAliases + }, + noExternal: noExternalDeps, + format: ['esm'], + minify: true, + dts: false, + define: { + __CLI_VERSION__: JSON.stringify(pkg.version), + __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), + __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry + } + }, + { + entry: ['./src/script-runtime-worker.ts'], + platform: 'node', + sourcemap: false, + unbundle: false, + deps: { + onlyBundle: false + }, + alias: { + '@': resolve('src'), + ...pluginAliases + }, + noExternal: noExternalDeps, + format: ['esm'], + minify: false, + dts: false, + define: { + __CLI_VERSION__: JSON.stringify(pkg.version), + __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), + __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry + } + }, + { + entry: ['./src/globals.ts'], + platform: 'node', + sourcemap: false, + alias: { + '@': resolve('src'), + ...pluginAliases + }, + format: ['esm'], + minify: false, + dts: {sourcemap: false} + } +]) diff --git a/sdk/vite.config.ts b/sdk/vite.config.ts new file mode 100644 index 00000000..1c390295 --- /dev/null +++ b/sdk/vite.config.ts @@ -0,0 +1,75 @@ +import {readFileSync} from 'node:fs' +import {resolve} from 'node:path' +import {fileURLToPath, URL} from 'node:url' +import {defineConfig} from 'vite' + +const pkg = JSON.parse(readFileSync('./package.json', 'utf8')) as {version: string, name: string} +const kiroGlobalPowersRegistry = '{"version":"1.0.0","powers":{},"repoSources":{}}' +const workspacePackageAliases: Record = { + '@truenine/md-compiler/errors': resolve('../libraries/md-compiler/dist/errors/index.mjs'), + '@truenine/md-compiler/globals': resolve('../libraries/md-compiler/dist/globals/index.mjs'), + '@truenine/md-compiler/markdown': resolve('../libraries/md-compiler/dist/markdown/index.mjs'), + '@truenine/md-compiler': resolve('../libraries/md-compiler/dist/index.mjs') +} + +const pluginAliases: Record = { + '@truenine/desk-paths': resolve('src/core/desk-paths.ts'), + '@truenine/plugin-output-shared': resolve('src/plugins/plugin-output-shared/index.ts'), + '@truenine/plugin-output-shared/utils': resolve('src/plugins/plugin-output-shared/utils/index.ts'), + '@truenine/plugin-output-shared/registry': resolve('src/plugins/plugin-output-shared/registry/index.ts'), + '@truenine/plugin-input-shared': resolve('src/plugins/plugin-input-shared/index.ts'), + '@truenine/plugin-input-shared/scope': resolve('src/plugins/plugin-input-shared/scope/index.ts'), + '@truenine/plugin-agentskills-compact': resolve('src/plugins/plugin-agentskills-compact.ts'), + '@truenine/plugin-agentsmd': resolve('src/plugins/plugin-agentsmd.ts'), + '@truenine/plugin-antigravity': resolve('src/plugins/plugin-antigravity/index.ts'), + '@truenine/plugin-claude-code-cli': resolve('src/plugins/plugin-claude-code-cli.ts'), + '@truenine/plugin-cursor': resolve('src/plugins/plugin-cursor.ts'), + '@truenine/plugin-droid-cli': resolve('src/plugins/plugin-droid-cli.ts'), + '@truenine/plugin-editorconfig': resolve('src/plugins/plugin-editorconfig.ts'), + '@truenine/plugin-gemini-cli': resolve('src/plugins/plugin-gemini-cli.ts'), + '@truenine/plugin-git-exclude': resolve('src/plugins/plugin-git-exclude.ts'), + '@truenine/plugin-input-agentskills': resolve('src/plugins/plugin-input-agentskills/index.ts'), + '@truenine/plugin-input-editorconfig': resolve('src/plugins/plugin-input-editorconfig/index.ts'), + '@truenine/plugin-input-fast-command': resolve('src/plugins/plugin-input-fast-command/index.ts'), + '@truenine/plugin-input-git-exclude': resolve('src/plugins/plugin-input-git-exclude/index.ts'), + '@truenine/plugin-input-gitignore': resolve('src/plugins/plugin-input-gitignore/index.ts'), + '@truenine/plugin-input-global-memory': resolve('src/plugins/plugin-input-global-memory/index.ts'), + '@truenine/plugin-input-jetbrains-config': resolve('src/plugins/plugin-input-jetbrains-config/index.ts'), + '@truenine/plugin-input-md-cleanup-effect': resolve('src/plugins/plugin-input-md-cleanup-effect/index.ts'), + '@truenine/plugin-input-orphan-cleanup-effect': resolve('src/plugins/plugin-input-orphan-cleanup-effect/index.ts'), + '@truenine/plugin-input-project-prompt': resolve('src/plugins/plugin-input-project-prompt/index.ts'), + '@truenine/plugin-input-readme': resolve('src/plugins/plugin-input-readme/index.ts'), + '@truenine/plugin-input-rule': resolve('src/plugins/plugin-input-rule/index.ts'), + '@truenine/plugin-input-shadow-project': resolve('src/plugins/plugin-input-shadow-project/index.ts'), + '@truenine/plugin-input-shared-ignore': resolve('src/plugins/plugin-input-shared-ignore/index.ts'), + '@truenine/plugin-input-skill-sync-effect': resolve('src/plugins/plugin-input-skill-sync-effect/index.ts'), + '@truenine/plugin-input-subagent': resolve('src/plugins/plugin-input-subagent/index.ts'), + '@truenine/plugin-input-vscode-config': resolve('src/plugins/plugin-input-vscode-config/index.ts'), + '@truenine/plugin-input-workspace': resolve('src/plugins/plugin-input-workspace/index.ts'), + '@truenine/plugin-jetbrains-ai-codex': resolve('src/plugins/plugin-jetbrains-ai-codex.ts'), + '@truenine/plugin-jetbrains-codestyle': resolve('src/plugins/plugin-jetbrains-codestyle.ts'), + '@truenine/plugin-openai-codex-cli': resolve('src/plugins/plugin-openai-codex-cli.ts'), + '@truenine/plugin-opencode-cli': resolve('src/plugins/plugin-opencode-cli.ts'), + '@truenine/plugin-qoder-ide': resolve('src/plugins/plugin-qoder-ide.ts'), + '@truenine/plugin-readme': resolve('src/plugins/plugin-readme.ts'), + '@truenine/plugin-trae-ide': resolve('src/plugins/plugin-trae-ide.ts'), + '@truenine/plugin-vscode': resolve('src/plugins/plugin-vscode.ts'), + '@truenine/plugin-warp-ide': resolve('src/plugins/plugin-warp-ide.ts'), + '@truenine/plugin-windsurf': resolve('src/plugins/plugin-windsurf.ts'), + '@truenine/plugin-zed': resolve('src/plugins/plugin-zed.ts') +} + +export default defineConfig({ + resolve: { + alias: { + '@': fileURLToPath(new URL('./src', import.meta.url)), + ...workspacePackageAliases, + ...pluginAliases + } + }, + define: { + __CLI_VERSION__: JSON.stringify(pkg.version), + __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), + __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry + } +}) diff --git a/sdk/vitest.config.ts b/sdk/vitest.config.ts new file mode 100644 index 00000000..c80ffd11 --- /dev/null +++ b/sdk/vitest.config.ts @@ -0,0 +1,34 @@ +import {fileURLToPath} from 'node:url' + +import {configDefaults, defineConfig, mergeConfig} from 'vitest/config' + +import viteConfig from './vite.config' + +export default mergeConfig( + viteConfig, + defineConfig({ + test: { + environment: 'node', + passWithNoTests: true, + exclude: [...configDefaults.exclude, 'e2e/*'], + root: fileURLToPath(new URL('./', import.meta.url)), + setupFiles: ['./test/setup-native-binding.ts'], + typecheck: { + enabled: true, + tsconfig: './tsconfig.test.json' + }, + testTimeout: 30000, + onConsoleLog: () => false, + coverage: { + provider: 'v8', + reporter: ['text', 'json', 'html'], + exclude: [ + 'node_modules/', + 'dist/', + '**/*.test.ts', + '**/*.property.test.ts' + ] + } + } + }) +) From 8ffbf417477cfecc505a9e5fbc8a587897a88893 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:30:24 +0800 Subject: [PATCH 06/27] feat: update references from memory-sync-cli to memory-sync-sdk in config validation and server files --- gui/src/utils/configValidation.ts | 6 +++--- mcp/package.json | 2 +- mcp/src/server.ts | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/gui/src/utils/configValidation.ts b/gui/src/utils/configValidation.ts index e9957464..63d5a95b 100644 --- a/gui/src/utils/configValidation.ts +++ b/gui/src/utils/configValidation.ts @@ -1,9 +1,9 @@ /** * Config validation for the UI frontend. * - * Replicates the validation logic from Core_CLI's `validateConfigStrict` - * (memory-sync-cli/src/ConfigLoader.ts) so the webview can validate - * config objects before saving — without importing from the CLI package + * Replicates the validation logic from the sdk config loader + * (`sdk/src/ConfigLoader.ts`) so the webview can validate + * config objects before saving — without importing from the sdk package * directly (different runtime context). */ diff --git a/mcp/package.json b/mcp/package.json index f30a8807..16cf8994 100644 --- a/mcp/package.json +++ b/mcp/package.json @@ -42,7 +42,7 @@ }, "dependencies": { "@modelcontextprotocol/sdk": "catalog:", - "@truenine/memory-sync-cli": "workspace:*", + "@truenine/memory-sync-sdk": "workspace:*", "zod": "catalog:" }, "devDependencies": { diff --git a/mcp/src/server.ts b/mcp/src/server.ts index dbbefac0..7e5d8cca 100644 --- a/mcp/src/server.ts +++ b/mcp/src/server.ts @@ -4,10 +4,10 @@ import type { PromptArtifactState, PromptServiceOptions, PromptSourceLocale -} from '@truenine/memory-sync-cli' +} from '@truenine/memory-sync-sdk' import {McpServer} from '@modelcontextprotocol/sdk/server/mcp.js' import {StdioServerTransport} from '@modelcontextprotocol/sdk/server/stdio.js' -import {getPrompt, listPrompts, upsertPromptSource, writePromptArtifacts} from '@truenine/memory-sync-cli' +import {getPrompt, listPrompts, upsertPromptSource, writePromptArtifacts} from '@truenine/memory-sync-sdk' import {z} from 'zod' const promptKindSchema = z.enum([ From 5e4415f0db6a4c28e6254dbc4a964384ad00cda7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:31:09 +0800 Subject: [PATCH 07/27] feat: rename CLI package and update related configurations for SDK integration --- cli/Cargo.toml | 33 ++---------- cli/build.rs | 47 ----------------- cli/package.json | 49 +++++++----------- cli/tsdown.config.ts | 121 ++----------------------------------------- cli/vite.config.ts | 74 +------------------------- cli/vitest.config.ts | 2 +- 6 files changed, 28 insertions(+), 298 deletions(-) delete mode 100644 cli/build.rs diff --git a/cli/Cargo.toml b/cli/Cargo.toml index fec9e6a2..b8f34c00 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "tnmsc" -description = "Cross-AI-tool prompt synchronisation CLI" +name = "tnmsc-cli-shell" +description = "Cross-AI-tool prompt synchronisation CLI shell" version.workspace = true edition.workspace = true rust-version.workspace = true @@ -8,38 +8,11 @@ license.workspace = true authors.workspace = true repository.workspace = true -[lib] -name = "tnmsc" -path = "src/lib.rs" -crate-type = ["rlib", "cdylib"] - [[bin]] name = "tnmsc" path = "src/main.rs" -[features] -default = [] -embedded-runtime = [] -napi = ["dep:napi", "dep:napi-derive"] - [dependencies] +tnmsc = { workspace = true } tnmsc-logger = { workspace = true } -tnmsc-md-compiler = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -thiserror = "2.0.18" clap = { workspace = true } -dirs = { workspace = true } -sha2 = { workspace = true } -napi = { workspace = true, optional = true } -napi-derive = { workspace = true, optional = true } -reqwest = { version = "0.13.2", default-features = false, features = ["blocking", "json", "rustls"] } -globset = "0.4.18" -walkdir = "2.5.0" - -[dev-dependencies] -proptest = "1.11.0" -tempfile = "3.27.0" - -[build-dependencies] -napi-build = { workspace = true } diff --git a/cli/build.rs b/cli/build.rs deleted file mode 100644 index ffcc7d99..00000000 --- a/cli/build.rs +++ /dev/null @@ -1,47 +0,0 @@ -use std::env; -use std::fs; -use std::path::{Path, PathBuf}; - -fn main() { - #[cfg(feature = "napi")] - napi_build::setup(); - - // Check if embedded-runtime feature is enabled via CARGO_FEATURE_* env var - // Note: #[cfg(feature = ...)] doesn't work in build.rs at runtime, - // we must check the environment variable set by cargo - if env::var("CARGO_FEATURE_EMBEDDED_RUNTIME").is_ok() { - let out_dir = env::var("OUT_DIR").expect("OUT_DIR not set"); - let dest = Path::new(&out_dir).join("plugin-runtime.mjs"); - - // Try multiple possible locations for plugin-runtime.mjs - let possible_sources = vec![ - // Already built in cli/dist - PathBuf::from("dist/plugin-runtime.mjs"), - // From repo root - PathBuf::from("cli/dist/plugin-runtime.mjs"), - // CI workspace path (when building from repo root) - PathBuf::from("../cli/dist/plugin-runtime.mjs"), - ]; - - let mut found = false; - for src in &possible_sources { - if src.exists() { - fs::copy(src, &dest).expect("Failed to copy plugin-runtime.mjs"); - println!("cargo:rerun-if-changed={}", src.display()); - found = true; - break; - } - } - - if !found { - panic!( - "plugin-runtime.mjs not found for embedded-runtime feature. \ - Please build it first with: pnpm -C cli exec tsdown \ - Searched paths: {:?}", - possible_sources - ); - } - - println!("cargo:rerun-if-changed=build.rs"); - } -} diff --git a/cli/package.json b/cli/package.json index 3f7f6a72..2a148297 100644 --- a/cli/package.json +++ b/cli/package.json @@ -2,7 +2,7 @@ "name": "@truenine/memory-sync-cli", "type": "module", "version": "2026.10330.118", - "description": "TrueNine Memory Synchronization CLI", + "description": "TrueNine Memory Synchronization CLI shell", "author": "TrueNine", "license": "AGPL-3.0-only", "homepage": "https://github.com/TrueNine/memory-sync", @@ -48,27 +48,22 @@ "registry": "https://registry.npmjs.org/" }, "scripts": { - "build": "run-s build:deps build:napi bundle finalize:bundle generate:schema", - "build:napi": "run-s build:native build:napi:copy", + "build": "run-s build:sdk build:shell sync:sdk-assets", + "build:sdk": "pnpm -F @truenine/memory-sync-sdk run build", "build:napi:copy": "tsx ../scripts/copy-napi.ts", - "build:native": "napi build --platform --release --output-dir dist -- --features napi", - "build:deps": "pnpm -F @truenine/logger -F @truenine/md-compiler -F @truenine/script-runtime run build", - "build:deps:ts": "pnpm -F @truenine/logger -F @truenine/md-compiler -F @truenine/script-runtime run build:ts", - "bundle": "tsx ../scripts/build-quiet.ts", - "check": "run-s build:deps:ts check:run", + "build:shell": "tsdown", + "ensure:sdk-build": "tsx scripts/ensure-sdk-build.ts", + "sync:sdk-assets": "tsx scripts/sync-sdk-dist.ts", + "check": "run-s ensure:sdk-build check:run", "check:run": "run-p lint:run typecheck:run", - "finalize:bundle": "tsx scripts/finalize-bundle.ts", - "generate:schema": "tsx scripts/generate-schema.ts", - "lint": "run-s build:deps:ts lint:run", + "lint": "run-s ensure:sdk-build lint:run", "lint:run": "eslint --cache --cache-location node_modules/.cache/.eslintcache .", "prepublishOnly": "run-s build check", - "test": "run-s build:deps test:run", - "test:native-cleanup-smoke": "tsx scripts/cleanup-native-smoke.ts", + "test": "run-s ensure:sdk-build test:run", "test:run": "vitest run", - "benchmark:cleanup": "tsx scripts/benchmark-cleanup.ts", - "lintfix": "run-s build:deps:ts lintfix:run", + "lintfix": "run-s ensure:sdk-build lintfix:run", "lintfix:run": "eslint --fix --cache --cache-location node_modules/.cache/.eslintcache .", - "typecheck": "run-s build:deps:ts typecheck:run", + "typecheck": "run-s ensure:sdk-build typecheck:run", "typecheck:run": "tsc --noEmit -p tsconfig.lib.json" }, "dependencies": { @@ -84,21 +79,15 @@ "@truenine/memory-sync-cli-win32-x64-msvc": "workspace:*" }, "devDependencies": { - "@clack/prompts": "catalog:", - "@truenine/logger": "workspace:*", - "@truenine/md-compiler": "workspace:*", - "@truenine/script-runtime": "workspace:*", - "@types/fs-extra": "catalog:", - "@types/picomatch": "catalog:", + "@truenine/eslint10-config": "catalog:", + "@truenine/memory-sync-sdk": "workspace:*", + "@types/node": "catalog:", "@vitest/coverage-v8": "catalog:", - "fast-glob": "catalog:", - "fs-extra": "catalog:", - "jiti": "catalog:", - "lightningcss": "catalog:", - "picocolors": "catalog:", - "picomatch": "catalog:", + "eslint": "catalog:", + "npm-run-all2": "catalog:", + "tsdown": "catalog:", "tsx": "catalog:", - "vitest": "catalog:", - "zod-to-json-schema": "catalog:" + "typescript": "catalog:", + "vitest": "catalog:" } } diff --git a/cli/tsdown.config.ts b/cli/tsdown.config.ts index 183d9c5c..675f8756 100644 --- a/cli/tsdown.config.ts +++ b/cli/tsdown.config.ts @@ -1,140 +1,27 @@ -import {readFileSync} from 'node:fs' -import {resolve} from 'node:path' import {defineConfig} from 'tsdown' -const pkg = JSON.parse(readFileSync('./package.json', 'utf8')) as {version: string, name: string} -const kiroGlobalPowersRegistry = '{"version":"1.0.0","powers":{},"repoSources":{}}' - -const pluginAliases: Record = { - '@truenine/desk-paths': resolve('src/core/desk-paths.ts'), - '@truenine/plugin-output-shared': resolve('src/plugins/plugin-output-shared/index.ts'), - '@truenine/plugin-input-shared': resolve('src/plugins/plugin-input-shared/index.ts'), - '@truenine/plugin-agentskills-compact': resolve('src/plugins/plugin-agentskills-compact.ts'), - '@truenine/plugin-agentsmd': resolve('src/plugins/plugin-agentsmd.ts'), - '@truenine/plugin-antigravity': resolve('src/plugins/plugin-antigravity/index.ts'), - '@truenine/plugin-claude-code-cli': resolve('src/plugins/plugin-claude-code-cli.ts'), - '@truenine/plugin-cursor': resolve('src/plugins/plugin-cursor.ts'), - '@truenine/plugin-droid-cli': resolve('src/plugins/plugin-droid-cli.ts'), - '@truenine/plugin-editorconfig': resolve('src/plugins/plugin-editorconfig.ts'), - '@truenine/plugin-gemini-cli': resolve('src/plugins/plugin-gemini-cli.ts'), - '@truenine/plugin-git-exclude': resolve('src/plugins/plugin-git-exclude.ts'), - '@truenine/plugin-input-agentskills': resolve('src/plugins/plugin-input-agentskills/index.ts'), - '@truenine/plugin-input-editorconfig': resolve('src/plugins/plugin-input-editorconfig/index.ts'), - '@truenine/plugin-input-fast-command': resolve('src/plugins/plugin-input-fast-command/index.ts'), - '@truenine/plugin-input-git-exclude': resolve('src/plugins/plugin-input-git-exclude/index.ts'), - '@truenine/plugin-input-gitignore': resolve('src/plugins/plugin-input-gitignore/index.ts'), - '@truenine/plugin-input-global-memory': resolve('src/plugins/plugin-input-global-memory/index.ts'), - '@truenine/plugin-input-jetbrains-config': resolve('src/plugins/plugin-input-jetbrains-config/index.ts'), - '@truenine/plugin-input-md-cleanup-effect': resolve('src/plugins/plugin-input-md-cleanup-effect/index.ts'), - '@truenine/plugin-input-orphan-cleanup-effect': resolve('src/plugins/plugin-input-orphan-cleanup-effect/index.ts'), - '@truenine/plugin-input-project-prompt': resolve('src/plugins/plugin-input-project-prompt/index.ts'), - '@truenine/plugin-input-readme': resolve('src/plugins/plugin-input-readme/index.ts'), - '@truenine/plugin-input-rule': resolve('src/plugins/plugin-input-rule/index.ts'), - '@truenine/plugin-input-shadow-project': resolve('src/plugins/plugin-input-shadow-project/index.ts'), - '@truenine/plugin-input-shared-ignore': resolve('src/plugins/plugin-input-shared-ignore/index.ts'), - '@truenine/plugin-input-skill-sync-effect': resolve('src/plugins/plugin-input-skill-sync-effect/index.ts'), - '@truenine/plugin-input-subagent': resolve('src/plugins/plugin-input-subagent/index.ts'), - '@truenine/plugin-input-vscode-config': resolve('src/plugins/plugin-input-vscode-config/index.ts'), - '@truenine/plugin-input-workspace': resolve('src/plugins/plugin-input-workspace/index.ts'), - '@truenine/plugin-jetbrains-ai-codex': resolve('src/plugins/plugin-jetbrains-ai-codex.ts'), - '@truenine/plugin-jetbrains-codestyle': resolve('src/plugins/plugin-jetbrains-codestyle.ts'), - '@truenine/plugin-openai-codex-cli': resolve('src/plugins/plugin-openai-codex-cli.ts'), - '@truenine/plugin-opencode-cli': resolve('src/plugins/plugin-opencode-cli.ts'), - '@truenine/plugin-qoder-ide': resolve('src/plugins/plugin-qoder-ide.ts'), - '@truenine/plugin-readme': resolve('src/plugins/plugin-readme.ts'), - '@truenine/plugin-trae-ide': resolve('src/plugins/plugin-trae-ide.ts'), - '@truenine/plugin-vscode': resolve('src/plugins/plugin-vscode.ts'), - '@truenine/plugin-warp-ide': resolve('src/plugins/plugin-warp-ide.ts'), - '@truenine/plugin-windsurf': resolve('src/plugins/plugin-windsurf.ts'), - '@truenine/plugin-zed': resolve('src/plugins/plugin-zed.ts') -} - -const noExternalDeps = [ - '@truenine/logger', - '@truenine/script-runtime', - 'fast-glob', - 'jiti', - '@truenine/desk-paths', - '@truenine/md-compiler', - ...Object.keys(pluginAliases) -] +const noExternalDeps = ['@truenine/memory-sync-sdk'] export default defineConfig([ { - entry: ['./src/index.ts', '!**/*.{spec,test}.*'], + entry: ['./src/index.ts'], platform: 'node', sourcemap: false, unbundle: false, deps: { onlyBundle: false }, - alias: { - '@': resolve('src'), - ...pluginAliases - }, noExternal: noExternalDeps, format: ['esm'], minify: true, dts: {sourcemap: false}, - outputOptions: {exports: 'named'}, - define: { - __CLI_VERSION__: JSON.stringify(pkg.version), - __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), - __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry - } - }, - { - entry: ['./src/plugin-runtime.ts'], - platform: 'node', - sourcemap: false, - unbundle: false, - deps: { - onlyBundle: false - }, - alias: { - '@': resolve('src'), - ...pluginAliases - }, - noExternal: noExternalDeps, - format: ['esm'], - minify: true, - dts: false, - define: { - __CLI_VERSION__: JSON.stringify(pkg.version), - __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), - __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry - } - }, - { - entry: ['./src/script-runtime-worker.ts'], - platform: 'node', - sourcemap: false, - unbundle: false, - deps: { - onlyBundle: false - }, - alias: { - '@': resolve('src'), - ...pluginAliases - }, - noExternal: noExternalDeps, - format: ['esm'], - minify: false, - dts: false, - define: { - __CLI_VERSION__: JSON.stringify(pkg.version), - __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), - __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry - } + outputOptions: {exports: 'named'} }, { entry: ['./src/globals.ts'], platform: 'node', sourcemap: false, - alias: { - '@': resolve('src'), - ...pluginAliases - }, + noExternal: noExternalDeps, format: ['esm'], minify: false, dts: {sourcemap: false} diff --git a/cli/vite.config.ts b/cli/vite.config.ts index 1c390295..7830f95c 100644 --- a/cli/vite.config.ts +++ b/cli/vite.config.ts @@ -1,75 +1,3 @@ -import {readFileSync} from 'node:fs' -import {resolve} from 'node:path' -import {fileURLToPath, URL} from 'node:url' import {defineConfig} from 'vite' -const pkg = JSON.parse(readFileSync('./package.json', 'utf8')) as {version: string, name: string} -const kiroGlobalPowersRegistry = '{"version":"1.0.0","powers":{},"repoSources":{}}' -const workspacePackageAliases: Record = { - '@truenine/md-compiler/errors': resolve('../libraries/md-compiler/dist/errors/index.mjs'), - '@truenine/md-compiler/globals': resolve('../libraries/md-compiler/dist/globals/index.mjs'), - '@truenine/md-compiler/markdown': resolve('../libraries/md-compiler/dist/markdown/index.mjs'), - '@truenine/md-compiler': resolve('../libraries/md-compiler/dist/index.mjs') -} - -const pluginAliases: Record = { - '@truenine/desk-paths': resolve('src/core/desk-paths.ts'), - '@truenine/plugin-output-shared': resolve('src/plugins/plugin-output-shared/index.ts'), - '@truenine/plugin-output-shared/utils': resolve('src/plugins/plugin-output-shared/utils/index.ts'), - '@truenine/plugin-output-shared/registry': resolve('src/plugins/plugin-output-shared/registry/index.ts'), - '@truenine/plugin-input-shared': resolve('src/plugins/plugin-input-shared/index.ts'), - '@truenine/plugin-input-shared/scope': resolve('src/plugins/plugin-input-shared/scope/index.ts'), - '@truenine/plugin-agentskills-compact': resolve('src/plugins/plugin-agentskills-compact.ts'), - '@truenine/plugin-agentsmd': resolve('src/plugins/plugin-agentsmd.ts'), - '@truenine/plugin-antigravity': resolve('src/plugins/plugin-antigravity/index.ts'), - '@truenine/plugin-claude-code-cli': resolve('src/plugins/plugin-claude-code-cli.ts'), - '@truenine/plugin-cursor': resolve('src/plugins/plugin-cursor.ts'), - '@truenine/plugin-droid-cli': resolve('src/plugins/plugin-droid-cli.ts'), - '@truenine/plugin-editorconfig': resolve('src/plugins/plugin-editorconfig.ts'), - '@truenine/plugin-gemini-cli': resolve('src/plugins/plugin-gemini-cli.ts'), - '@truenine/plugin-git-exclude': resolve('src/plugins/plugin-git-exclude.ts'), - '@truenine/plugin-input-agentskills': resolve('src/plugins/plugin-input-agentskills/index.ts'), - '@truenine/plugin-input-editorconfig': resolve('src/plugins/plugin-input-editorconfig/index.ts'), - '@truenine/plugin-input-fast-command': resolve('src/plugins/plugin-input-fast-command/index.ts'), - '@truenine/plugin-input-git-exclude': resolve('src/plugins/plugin-input-git-exclude/index.ts'), - '@truenine/plugin-input-gitignore': resolve('src/plugins/plugin-input-gitignore/index.ts'), - '@truenine/plugin-input-global-memory': resolve('src/plugins/plugin-input-global-memory/index.ts'), - '@truenine/plugin-input-jetbrains-config': resolve('src/plugins/plugin-input-jetbrains-config/index.ts'), - '@truenine/plugin-input-md-cleanup-effect': resolve('src/plugins/plugin-input-md-cleanup-effect/index.ts'), - '@truenine/plugin-input-orphan-cleanup-effect': resolve('src/plugins/plugin-input-orphan-cleanup-effect/index.ts'), - '@truenine/plugin-input-project-prompt': resolve('src/plugins/plugin-input-project-prompt/index.ts'), - '@truenine/plugin-input-readme': resolve('src/plugins/plugin-input-readme/index.ts'), - '@truenine/plugin-input-rule': resolve('src/plugins/plugin-input-rule/index.ts'), - '@truenine/plugin-input-shadow-project': resolve('src/plugins/plugin-input-shadow-project/index.ts'), - '@truenine/plugin-input-shared-ignore': resolve('src/plugins/plugin-input-shared-ignore/index.ts'), - '@truenine/plugin-input-skill-sync-effect': resolve('src/plugins/plugin-input-skill-sync-effect/index.ts'), - '@truenine/plugin-input-subagent': resolve('src/plugins/plugin-input-subagent/index.ts'), - '@truenine/plugin-input-vscode-config': resolve('src/plugins/plugin-input-vscode-config/index.ts'), - '@truenine/plugin-input-workspace': resolve('src/plugins/plugin-input-workspace/index.ts'), - '@truenine/plugin-jetbrains-ai-codex': resolve('src/plugins/plugin-jetbrains-ai-codex.ts'), - '@truenine/plugin-jetbrains-codestyle': resolve('src/plugins/plugin-jetbrains-codestyle.ts'), - '@truenine/plugin-openai-codex-cli': resolve('src/plugins/plugin-openai-codex-cli.ts'), - '@truenine/plugin-opencode-cli': resolve('src/plugins/plugin-opencode-cli.ts'), - '@truenine/plugin-qoder-ide': resolve('src/plugins/plugin-qoder-ide.ts'), - '@truenine/plugin-readme': resolve('src/plugins/plugin-readme.ts'), - '@truenine/plugin-trae-ide': resolve('src/plugins/plugin-trae-ide.ts'), - '@truenine/plugin-vscode': resolve('src/plugins/plugin-vscode.ts'), - '@truenine/plugin-warp-ide': resolve('src/plugins/plugin-warp-ide.ts'), - '@truenine/plugin-windsurf': resolve('src/plugins/plugin-windsurf.ts'), - '@truenine/plugin-zed': resolve('src/plugins/plugin-zed.ts') -} - -export default defineConfig({ - resolve: { - alias: { - '@': fileURLToPath(new URL('./src', import.meta.url)), - ...workspacePackageAliases, - ...pluginAliases - } - }, - define: { - __CLI_VERSION__: JSON.stringify(pkg.version), - __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), - __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry - } -}) +export default defineConfig({}) diff --git a/cli/vitest.config.ts b/cli/vitest.config.ts index c80ffd11..e6571c26 100644 --- a/cli/vitest.config.ts +++ b/cli/vitest.config.ts @@ -12,7 +12,7 @@ export default mergeConfig( passWithNoTests: true, exclude: [...configDefaults.exclude, 'e2e/*'], root: fileURLToPath(new URL('./', import.meta.url)), - setupFiles: ['./test/setup-native-binding.ts'], + setupFiles: ['../sdk/test/setup-native-binding.ts'], typecheck: { enabled: true, tsconfig: './tsconfig.test.json' From 85a45ab5c23960872745d0fba7da79a3336ce621 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:31:31 +0800 Subject: [PATCH 08/27] feat: remove unused native binding test files for cleanup functionality --- cli/test/native-binding/cleanup.ts | 500 -------------------------- cli/test/native-binding/desk-paths.ts | 264 -------------- cli/test/setup-native-binding.ts | 215 ----------- 3 files changed, 979 deletions(-) delete mode 100644 cli/test/native-binding/cleanup.ts delete mode 100644 cli/test/native-binding/desk-paths.ts delete mode 100644 cli/test/setup-native-binding.ts diff --git a/cli/test/native-binding/cleanup.ts b/cli/test/native-binding/cleanup.ts deleted file mode 100644 index d1320e98..00000000 --- a/cli/test/native-binding/cleanup.ts +++ /dev/null @@ -1,500 +0,0 @@ -import type { - ILogger, - OutputCleanContext, - OutputCleanupDeclarations, - OutputCleanupPathDeclaration, - OutputFileDeclaration, - OutputPlugin, - PluginOptions -} from '../../src/plugins/plugin-core' -import type {ProtectedPathRule, ProtectionMode, ProtectionRuleMatcher} from '../../src/ProtectedDeletionGuard' -import type {DeletionError} from './desk-paths' -import * as fs from 'node:fs' -import * as path from 'node:path' -import glob from 'fast-glob' -import {compactDeletionTargets} from '../../src/cleanup/delete-targets' -import {planWorkspaceEmptyDirectoryCleanup} from '../../src/cleanup/empty-directories' -import {buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines} from '../../src/diagnostics' -import {collectAllPluginOutputs} from '../../src/plugins/plugin-core' -import { - buildComparisonKeys, - collectConfiguredAindexInputRules, - collectProjectRoots, - collectProtectedInputSourceRules, - createProtectedDeletionGuard, - logProtectedDeletionGuardError, - partitionDeletionTargets, - resolveAbsolutePath -} from '../../src/ProtectedDeletionGuard' -import {deleteEmptyDirectories, deleteTargets as deskDeleteTargets} from './desk-paths' - -/** - * Result of cleanup operation - */ -export interface CleanupResult { - readonly deletedFiles: number - readonly deletedDirs: number - readonly errors: readonly CleanupError[] - readonly violations: readonly import('../../src/ProtectedDeletionGuard').ProtectedPathViolation[] - readonly conflicts: readonly CleanupProtectionConflict[] - readonly message?: string -} - -/** - * Error during cleanup operation - */ -export interface CleanupError { - readonly path: string - readonly type: 'file' | 'directory' - readonly error: unknown -} - -export interface CleanupProtectionConflict { - readonly outputPath: string - readonly outputPlugin: string - readonly protectedPath: string - readonly protectionMode: ProtectionMode - readonly protectedBy: string - readonly reason: string -} - -export class CleanupProtectionConflictError extends Error { - readonly conflicts: readonly CleanupProtectionConflict[] - - constructor(conflicts: readonly CleanupProtectionConflict[]) { - super(buildCleanupProtectionConflictMessage(conflicts)) - this.name = 'CleanupProtectionConflictError' - this.conflicts = conflicts - } -} - -interface CleanupTargetCollections { - readonly filesToDelete: string[] - readonly dirsToDelete: string[] - readonly emptyDirsToDelete: string[] - readonly violations: readonly import('../../src/ProtectedDeletionGuard').ProtectedPathViolation[] - readonly conflicts: readonly CleanupProtectionConflict[] - readonly excludedScanGlobs: string[] -} - -const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS = ['**/node_modules/**', '**/.git/**', '**/.turbo/**', '**/.pnpm-store/**', '**/.yarn/**', '**/.next/**'] as const - -function normalizeGlobPattern(pattern: string): string { - return resolveAbsolutePath(pattern).replaceAll('\\', '/') -} - -function expandCleanupGlob(pattern: string, ignoreGlobs: readonly string[]): readonly string[] { - const normalizedPattern = normalizeGlobPattern(pattern) - return glob.sync(normalizedPattern, { - onlyFiles: false, - dot: true, - absolute: true, - followSymbolicLinks: false, - ignore: [...ignoreGlobs] - }) -} - -function shouldExcludeCleanupMatch(matchedPath: string, target: OutputCleanupPathDeclaration): boolean { - if (target.excludeBasenames == null || target.excludeBasenames.length === 0) return false - const basename = path.basename(matchedPath) - return target.excludeBasenames.includes(basename) -} - -async function collectPluginCleanupDeclarations(plugin: OutputPlugin, cleanCtx: OutputCleanContext): Promise { - if (plugin.declareCleanupPaths == null) return {} - return plugin.declareCleanupPaths({...cleanCtx, dryRun: true}) -} - -async function collectPluginCleanupSnapshot( - plugin: OutputPlugin, - cleanCtx: OutputCleanContext, - predeclaredOutputs?: ReadonlyMap -): Promise<{ - readonly plugin: OutputPlugin - readonly outputs: Awaited> - readonly cleanup: OutputCleanupDeclarations -}> { - const existingOutputDeclarations = predeclaredOutputs?.get(plugin) - const [outputs, cleanup] = await Promise.all([ - existingOutputDeclarations != null ? Promise.resolve(existingOutputDeclarations) : plugin.declareOutputFiles({...cleanCtx, dryRun: true}), - collectPluginCleanupDeclarations(plugin, cleanCtx) - ]) - - return {plugin, outputs, cleanup} -} - -function buildCleanupProtectionConflictMessage(conflicts: readonly CleanupProtectionConflict[]): string { - const pathList = conflicts.map(conflict => conflict.outputPath).join(', ') - return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}` -} - -function detectCleanupProtectionConflicts( - outputPathOwners: ReadonlyMap, - guard: ReturnType -): CleanupProtectionConflict[] { - const conflicts: CleanupProtectionConflict[] = [] - - for (const [outputPath, outputPlugins] of outputPathOwners.entries()) { - const outputKeys = new Set(buildComparisonKeys(outputPath)) - - for (const rule of guard.compiledRules) { - const isExactMatch = rule.comparisonKeys.some(ruleKey => outputKeys.has(ruleKey)) - if (!isExactMatch) continue - - for (const outputPlugin of outputPlugins) { - conflicts.push({ - outputPath, - outputPlugin, - protectedPath: rule.path, - protectionMode: rule.protectionMode, - protectedBy: rule.source, - reason: rule.reason - }) - } - } - } - - return conflicts.sort((a, b) => { - const pathDiff = a.outputPath.localeCompare(b.outputPath) - if (pathDiff !== 0) return pathDiff - return a.protectedPath.localeCompare(b.protectedPath) - }) -} - -function logCleanupProtectionConflicts(logger: ILogger, conflicts: readonly CleanupProtectionConflict[]): void { - const firstConflict = conflicts[0] - - logger.error( - buildDiagnostic({ - code: 'CLEANUP_PROTECTION_CONFLICT_DETECTED', - title: 'Cleanup output paths conflict with protected inputs', - rootCause: diagnosticLines( - `tnmsc found ${conflicts.length} output path(s) that also match protected cleanup rules.`, - firstConflict == null - ? 'No conflict details were captured.' - : `Example conflict: "${firstConflict.outputPath}" is protected by "${firstConflict.protectedPath}".` - ), - exactFix: diagnosticLines('Separate generated output paths from protected source or reserved workspace paths before running cleanup again.'), - possibleFixes: [ - diagnosticLines('Update cleanup protect declarations so they do not overlap generated outputs.'), - diagnosticLines('Move the conflicting output target to a generated-only directory.') - ], - details: { - count: conflicts.length, - conflicts: conflicts.map(conflict => ({ - outputPath: conflict.outputPath, - outputPlugin: conflict.outputPlugin, - protectedPath: conflict.protectedPath, - protectionMode: conflict.protectionMode, - protectedBy: conflict.protectedBy, - reason: conflict.reason - })) - } - }) - ) -} - -/** - * Collect deletion targets from enabled output plugins. - */ -async function collectCleanupTargets( - outputPlugins: readonly OutputPlugin[], - cleanCtx: OutputCleanContext, - predeclaredOutputs?: ReadonlyMap -): Promise { - const deleteFiles = new Set() - const deleteDirs = new Set() - const protectedRules = new Map() - const excludeScanGlobSet = new Set(DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS) - const outputPathOwners = new Map() - - const pluginSnapshots = await Promise.all(outputPlugins.map(async plugin => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs))) - - const addDeletePath = (rawPath: string, kind: 'file' | 'directory'): void => { - if (kind === 'directory') deleteDirs.add(resolveAbsolutePath(rawPath)) - else deleteFiles.add(resolveAbsolutePath(rawPath)) - } - - const addProtectRule = (rawPath: string, protectionMode: ProtectionMode, reason: string, source: string, matcher: ProtectionRuleMatcher = 'path'): void => { - const resolvedPath = resolveAbsolutePath(rawPath) - protectedRules.set(`${matcher}:${protectionMode}:${resolvedPath}`, { - path: resolvedPath, - protectionMode, - reason, - source, - matcher - }) - } - - const defaultProtectionModeForTarget = (target: OutputCleanupPathDeclaration): ProtectionMode => { - if (target.protectionMode != null) return target.protectionMode - return target.kind === 'file' ? 'direct' : 'recursive' - } - - for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) { - addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source) - } - if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) { - for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, { - workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path - })) { - addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source, rule.matcher) - } - } - - for (const rule of cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []) { - addProtectRule( - rule.path, - rule.protectionMode, - rule.reason ?? 'configured cleanup protection rule', - 'configured-cleanup-protection', - rule.matcher ?? 'path' - ) - } - - for (const snapshot of pluginSnapshots) { - for (const declaration of snapshot.outputs) { - const resolvedOutputPath = resolveAbsolutePath(declaration.path) - addDeletePath(resolvedOutputPath, 'file') - const existingOwners = outputPathOwners.get(resolvedOutputPath) - if (existingOwners == null) outputPathOwners.set(resolvedOutputPath, [snapshot.plugin.name]) - else if (!existingOwners.includes(snapshot.plugin.name)) existingOwners.push(snapshot.plugin.name) - } - for (const ignoreGlob of snapshot.cleanup.excludeScanGlobs ?? []) excludeScanGlobSet.add(normalizeGlobPattern(ignoreGlob)) - } - - const excludeScanGlobs = [...excludeScanGlobSet] - - const resolveDeleteGlob = (target: OutputCleanupPathDeclaration): void => { - for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) { - if (shouldExcludeCleanupMatch(matchedPath, target)) continue - - try { - const stat = fs.lstatSync(matchedPath) - if (stat.isDirectory()) addDeletePath(matchedPath, 'directory') - else addDeletePath(matchedPath, 'file') - } catch {} - } - } - - const resolveProtectGlob = (target: OutputCleanupPathDeclaration, pluginName: string): void => { - const protectionMode = defaultProtectionModeForTarget(target) - const reason = target.label != null ? `plugin cleanup protect declaration (${target.label})` : 'plugin cleanup protect declaration' - - for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) { - addProtectRule(matchedPath, protectionMode, reason, `plugin-cleanup-protect:${pluginName}`) - } - } - - for (const {plugin, cleanup} of pluginSnapshots) { - for (const target of cleanup.protect ?? []) { - if (target.kind === 'glob') { - resolveProtectGlob(target, plugin.name) - continue - } - addProtectRule( - target.path, - defaultProtectionModeForTarget(target), - target.label != null ? `plugin cleanup protect declaration (${target.label})` : 'plugin cleanup protect declaration', - `plugin-cleanup-protect:${plugin.name}` - ) - } - - for (const target of cleanup.delete ?? []) { - if (target.kind === 'glob') { - resolveDeleteGlob(target) - continue - } - if (target.kind === 'directory') addDeletePath(target.path, 'directory') - else addDeletePath(target.path, 'file') - } - } - - const guard = createProtectedDeletionGuard({ - workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, - projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext), - rules: [...protectedRules.values()], - ...cleanCtx.collectedOutputContext.aindexDir != null ? {aindexDir: cleanCtx.collectedOutputContext.aindexDir} : {} - }) - const conflicts = detectCleanupProtectionConflicts(outputPathOwners, guard) - if (conflicts.length > 0) throw new CleanupProtectionConflictError(conflicts) - const filePartition = partitionDeletionTargets([...deleteFiles], guard) - const dirPartition = partitionDeletionTargets([...deleteDirs], guard) - - const compactedTargets = compactDeletionTargets(filePartition.safePaths, dirPartition.safePaths) - const emptyDirectoryPlan = planWorkspaceEmptyDirectoryCleanup({ - fs, - path, - workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, - filesToDelete: compactedTargets.files, - dirsToDelete: compactedTargets.dirs - }) - - return { - filesToDelete: compactedTargets.files, - dirsToDelete: compactedTargets.dirs, - emptyDirsToDelete: emptyDirectoryPlan.emptyDirsToDelete, - violations: [...filePartition.violations, ...dirPartition.violations].sort((a, b) => a.targetPath.localeCompare(b.targetPath)), - conflicts: [], - excludedScanGlobs: [...excludeScanGlobSet].sort((a, b) => a.localeCompare(b)) - } -} - -export async function collectDeletionTargets( - outputPlugins: readonly OutputPlugin[], - cleanCtx: OutputCleanContext, - predeclaredOutputs?: ReadonlyMap -): Promise<{ - filesToDelete: string[] - dirsToDelete: string[] - emptyDirsToDelete: string[] - violations: import('../../src/ProtectedDeletionGuard').ProtectedPathViolation[] - conflicts: CleanupProtectionConflict[] - excludedScanGlobs: string[] -}> { - const targets = await collectCleanupTargets(outputPlugins, cleanCtx, predeclaredOutputs) - return { - filesToDelete: targets.filesToDelete, - dirsToDelete: targets.dirsToDelete.sort((a, b) => a.localeCompare(b)), - emptyDirsToDelete: targets.emptyDirsToDelete.sort((a, b) => a.localeCompare(b)), - violations: [...targets.violations], - conflicts: [...targets.conflicts], - excludedScanGlobs: [...targets.excludedScanGlobs] - } -} - -function buildCleanupErrors(logger: ILogger, errors: readonly DeletionError[], type: 'file' | 'directory'): CleanupError[] { - return errors.map(currentError => { - const errorMessage = currentError.error instanceof Error ? currentError.error.message : String(currentError.error) - logger.warn( - buildFileOperationDiagnostic({ - code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED', - title: type === 'file' ? 'Cleanup could not delete a file' : 'Cleanup could not delete a directory', - operation: 'delete', - targetKind: type, - path: currentError.path, - error: errorMessage, - details: { - phase: 'cleanup' - } - }) - ) - - return {path: currentError.path, type, error: currentError.error} - }) -} - -async function executeCleanupTargets( - targets: CleanupTargetCollections, - logger: ILogger -): Promise<{deletedFiles: number, deletedDirs: number, errors: CleanupError[]}> { - logger.debug('cleanup delete execution started', { - filesToDelete: targets.filesToDelete.length, - dirsToDelete: targets.dirsToDelete.length + targets.emptyDirsToDelete.length, - emptyDirsToDelete: targets.emptyDirsToDelete.length - }) - - const result = await deskDeleteTargets({ - files: targets.filesToDelete, - dirs: targets.dirsToDelete - }) - const emptyDirResult = await deleteEmptyDirectories(targets.emptyDirsToDelete) - - const fileErrors = buildCleanupErrors(logger, result.fileErrors, 'file') - const dirErrors = buildCleanupErrors(logger, [...result.dirErrors, ...emptyDirResult.errors], 'directory') - const allErrors = [...fileErrors, ...dirErrors] - - logger.debug('cleanup delete execution complete', { - deletedFiles: result.deletedFiles.length, - deletedDirs: result.deletedDirs.length + emptyDirResult.deletedPaths.length, - errors: allErrors.length - }) - - return { - deletedFiles: result.deletedFiles.length, - deletedDirs: result.deletedDirs.length + emptyDirResult.deletedPaths.length, - errors: allErrors - } -} - -function logCleanupPlanDiagnostics(logger: ILogger, targets: CleanupTargetCollections): void { - logger.debug('cleanup plan built', { - filesToDelete: targets.filesToDelete.length, - dirsToDelete: targets.dirsToDelete.length + targets.emptyDirsToDelete.length, - emptyDirsToDelete: targets.emptyDirsToDelete.length, - violations: targets.violations.length, - conflicts: targets.conflicts.length, - excludedScanGlobs: targets.excludedScanGlobs - }) -} - -/** - * Perform cleanup operation for output plugins. - * This is the main reusable cleanup function that can be called from both - * CleanCommand and ExecuteCommand (for pre-cleanup). - */ -export async function performCleanup( - outputPlugins: readonly OutputPlugin[], - cleanCtx: OutputCleanContext, - logger: ILogger, - predeclaredOutputs?: ReadonlyMap -): Promise { - if (predeclaredOutputs != null) { - const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx, predeclaredOutputs) - logger.debug('Collected outputs for cleanup', { - projectDirs: outputs.projectDirs.length, - projectFiles: outputs.projectFiles.length, - globalDirs: outputs.globalDirs.length, - globalFiles: outputs.globalFiles.length - }) - } - - let targets: CleanupTargetCollections - try { - targets = await collectCleanupTargets(outputPlugins, cleanCtx, predeclaredOutputs) - } catch (error) { - if (error instanceof CleanupProtectionConflictError) { - logCleanupProtectionConflicts(logger, error.conflicts) - return { - deletedFiles: 0, - deletedDirs: 0, - errors: [], - violations: [], - conflicts: error.conflicts, - message: error.message - } - } - throw error - } - const cleanupTargets: CleanupTargetCollections = { - filesToDelete: targets.filesToDelete, - dirsToDelete: targets.dirsToDelete, - emptyDirsToDelete: targets.emptyDirsToDelete, - violations: targets.violations, - conflicts: targets.conflicts, - excludedScanGlobs: targets.excludedScanGlobs - } - logCleanupPlanDiagnostics(logger, cleanupTargets) - - if (cleanupTargets.violations.length > 0) { - logProtectedDeletionGuardError(logger, 'cleanup', cleanupTargets.violations) - return { - deletedFiles: 0, - deletedDirs: 0, - errors: [], - violations: cleanupTargets.violations, - conflicts: [], - message: `Protected deletion guard blocked cleanup for ${cleanupTargets.violations.length} path(s)` - } - } - - const executionResult = await executeCleanupTargets(cleanupTargets, logger) - - return { - deletedFiles: executionResult.deletedFiles, - deletedDirs: executionResult.deletedDirs, - errors: executionResult.errors, - violations: [], - conflicts: [] - } -} diff --git a/cli/test/native-binding/desk-paths.ts b/cli/test/native-binding/desk-paths.ts deleted file mode 100644 index 243ac6bf..00000000 --- a/cli/test/native-binding/desk-paths.ts +++ /dev/null @@ -1,264 +0,0 @@ -import type {Buffer} from 'node:buffer' -import type {LoggerDiagnosticInput} from '../../src/plugins/plugin-core' -import * as fs from 'node:fs' -import path from 'node:path' -import process from 'node:process' -import {buildFileOperationDiagnostic} from '../../src/diagnostics' -import {resolveRuntimeEnvironment, resolveUserPath} from '../../src/runtime-environment' - -type PlatformFixedDir = 'win32' | 'darwin' | 'linux' - -function getLinuxDataDir(homeDir: string): string { - const xdgDataHome = process.env['XDG_DATA_HOME'] - if (typeof xdgDataHome === 'string' && xdgDataHome.trim().length > 0) return resolveUserPath(xdgDataHome) - return path.join(homeDir, '.local', 'share') -} - -export function getPlatformFixedDir(): string { - const runtimeEnvironment = resolveRuntimeEnvironment() - const platform = (runtimeEnvironment.isWsl ? 'win32' : runtimeEnvironment.platform) as PlatformFixedDir - const homeDir = runtimeEnvironment.effectiveHomeDir - - if (platform === 'win32') return resolveUserPath(process.env['LOCALAPPDATA'] ?? path.join(homeDir, 'AppData', 'Local')) - if (platform === 'darwin') return path.join(homeDir, 'Library', 'Application Support') - if (platform === 'linux') return getLinuxDataDir(homeDir) - - throw new Error(`Unsupported platform: ${process.platform}`) -} - -export function ensureDir(dir: string): void { - fs.mkdirSync(dir, {recursive: true}) -} - -export function existsSync(p: string): boolean { - return fs.existsSync(p) -} - -export function deletePathSync(p: string): void { - if (!fs.existsSync(p)) return - - const stat = fs.lstatSync(p) - if (stat.isSymbolicLink()) { - if (process.platform === 'win32') fs.rmSync(p, {recursive: true, force: true}) - else fs.unlinkSync(p) - } else if (stat.isDirectory()) fs.rmSync(p, {recursive: true, force: true}) - else fs.unlinkSync(p) -} - -export function writeFileSync(filePath: string, data: string | Buffer, encoding: BufferEncoding = 'utf8'): void { - const parentDir = path.dirname(filePath) - ensureDir(parentDir) - if (typeof data === 'string') fs.writeFileSync(filePath, data, encoding) - else fs.writeFileSync(filePath, data) -} - -export function readFileSync(filePath: string, encoding: BufferEncoding = 'utf8'): string { - try { - return fs.readFileSync(filePath, encoding) - } catch (error) { - const msg = error instanceof Error ? error.message : String(error) - throw new Error(`Failed to read file "${filePath}": ${msg}`) - } -} - -export interface DeletionError { - readonly path: string - readonly error: unknown -} - -export interface DeletionResult { - readonly deleted: number - readonly deletedPaths: readonly string[] - readonly errors: readonly DeletionError[] -} - -export interface DeleteTargetsResult { - readonly deletedFiles: readonly string[] - readonly deletedDirs: readonly string[] - readonly fileErrors: readonly DeletionError[] - readonly dirErrors: readonly DeletionError[] -} - -const DELETE_CONCURRENCY = 32 - -async function deletePath(p: string): Promise { - try { - const stat = await fs.promises.lstat(p) - if (stat.isSymbolicLink()) { - await (process.platform === 'win32' ? fs.promises.rm(p, {recursive: true, force: true}) : fs.promises.unlink(p)) - return true - } - - if (stat.isDirectory()) { - await fs.promises.rm(p, {recursive: true, force: true}) - return true - } - - await fs.promises.unlink(p) - return true - } catch (error) { - if ((error as NodeJS.ErrnoException).code === 'ENOENT') return false - throw error - } -} - -async function deleteEmptyDirectory(p: string): Promise { - try { - await fs.promises.rmdir(p) - return true - } catch (error) { - const {code} = error as NodeJS.ErrnoException - if (code === 'ENOENT' || code === 'ENOTEMPTY') return false - throw error - } -} - -async function mapWithConcurrencyLimit(items: readonly T[], concurrency: number, worker: (item: T) => Promise): Promise { - if (items.length === 0) return [] - - const results: TResult[] = [] - let nextIndex = 0 - - const runWorker = async (): Promise => { - while (true) { - const currentIndex = nextIndex - if (currentIndex >= items.length) return - - nextIndex += 1 - results[currentIndex] = await worker(items[currentIndex] as T) - } - } - - const workerCount = Math.min(concurrency, items.length) - const workers: Promise[] = [] - for (let index = 0; index < workerCount; index += 1) { - workers.push(runWorker()) - } - await Promise.all(workers) - - return results -} - -async function deletePaths(paths: readonly string[], options?: {readonly sortByDepthDescending?: boolean}): Promise { - const sortedPaths = options?.sortByDepthDescending === true ? [...paths].sort((a, b) => b.length - a.length || b.localeCompare(a)) : [...paths] - - const results = await mapWithConcurrencyLimit(sortedPaths, DELETE_CONCURRENCY, async currentPath => { - try { - const deleted = await deletePath(currentPath) - return {path: currentPath, deleted} - } catch (error) { - return {path: currentPath, error} - } - }) - - const deletedPaths: string[] = [] - const errors: DeletionError[] = [] - - for (const result of results) { - if ('error' in result) { - errors.push({path: result.path, error: result.error}) - continue - } - - if (result.deleted) deletedPaths.push(result.path) - } - - return { - deleted: deletedPaths.length, - deletedPaths, - errors - } -} - -export async function deleteFiles(files: readonly string[]): Promise { - return deletePaths(files) -} - -export async function deleteDirectories(dirs: readonly string[]): Promise { - return deletePaths(dirs, {sortByDepthDescending: true}) -} - -export async function deleteEmptyDirectories(dirs: readonly string[]): Promise { - const sortedPaths = [...dirs].sort((a, b) => b.length - a.length || b.localeCompare(a)) - const deletedPaths: string[] = [] - const errors: DeletionError[] = [] - - for (const currentPath of sortedPaths) { - try { - const deleted = await deleteEmptyDirectory(currentPath) - if (deleted) deletedPaths.push(currentPath) - } catch (error) { - errors.push({path: currentPath, error}) - } - } - - return { - deleted: deletedPaths.length, - deletedPaths, - errors - } -} - -export async function deleteTargets(targets: {readonly files?: readonly string[], readonly dirs?: readonly string[]}): Promise { - const [fileResult, dirResult] = await Promise.all([deleteFiles(targets.files ?? []), deleteDirectories(targets.dirs ?? [])]) - - return { - deletedFiles: fileResult.deletedPaths, - deletedDirs: dirResult.deletedPaths, - fileErrors: fileResult.errors, - dirErrors: dirResult.errors - } -} - -export interface WriteLogger { - readonly trace: (data: object) => void - readonly error: (diagnostic: LoggerDiagnosticInput) => void -} - -export interface SafeWriteOptions { - readonly fullPath: string - readonly content: string | Buffer - readonly type: string - readonly relativePath: string - readonly dryRun: boolean - readonly logger: WriteLogger -} - -export interface SafeWriteResult { - readonly path: string - readonly success: boolean - readonly skipped?: boolean - readonly error?: Error -} - -export function writeFileSafe(options: SafeWriteOptions): SafeWriteResult { - const {fullPath, content, type, relativePath, dryRun, logger} = options - - if (dryRun) { - logger.trace({action: 'dryRun', type, path: fullPath}) - return {path: relativePath, success: true, skipped: false} - } - - try { - writeFileSync(fullPath, content) - logger.trace({action: 'write', type, path: fullPath}) - return {path: relativePath, success: true} - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - logger.error( - buildFileOperationDiagnostic({ - code: 'OUTPUT_FILE_WRITE_FAILED', - title: `Failed to write ${type} output`, - operation: 'write', - targetKind: `${type} output file`, - path: fullPath, - error: errMsg, - details: { - relativePath, - type - } - }) - ) - return {path: relativePath, success: false, error: error as Error} - } -} diff --git a/cli/test/setup-native-binding.ts b/cli/test/setup-native-binding.ts deleted file mode 100644 index 48fa5e04..00000000 --- a/cli/test/setup-native-binding.ts +++ /dev/null @@ -1,215 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../src/plugins/plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import glob from 'fast-glob' -import {FilePathKind, PluginKind} from '../src/plugins/plugin-core/enums' -import * as deskPaths from './native-binding/desk-paths' - -interface NativeCleanupTarget { - readonly path: string - readonly kind: 'file' | 'directory' | 'glob' - readonly excludeBasenames?: readonly string[] - readonly protectionMode?: 'direct' | 'recursive' - readonly scope?: string - readonly label?: string -} - -interface NativeCleanupDeclarations { - readonly delete?: readonly NativeCleanupTarget[] - readonly protect?: readonly NativeCleanupTarget[] - readonly excludeScanGlobs?: readonly string[] -} - -interface NativePluginCleanupSnapshot { - readonly pluginName: string - readonly outputs: readonly string[] - readonly cleanup: NativeCleanupDeclarations -} - -interface NativeProtectedRule { - readonly path: string - readonly protectionMode: 'direct' | 'recursive' - readonly reason: string - readonly source: string - readonly matcher?: 'path' | 'glob' -} - -interface NativeCleanupSnapshot { - readonly workspaceDir: string - readonly aindexDir?: string - readonly projectRoots: readonly string[] - readonly protectedRules: readonly NativeProtectedRule[] - readonly pluginSnapshots: readonly NativePluginCleanupSnapshot[] -} - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createSyntheticOutputPlugin(snapshot: NativePluginCleanupSnapshot): OutputPlugin { - return { - type: PluginKind.Output, - name: snapshot.pluginName, - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return snapshot.outputs.map(output => ({path: output, source: {}})) - }, - async declareCleanupPaths(): Promise { - return { - ...snapshot.cleanup.delete != null ? {delete: [...snapshot.cleanup.delete] as OutputCleanupDeclarations['delete']} : {}, - ...snapshot.cleanup.protect != null ? {protect: [...snapshot.cleanup.protect] as OutputCleanupDeclarations['protect']} : {}, - ...snapshot.cleanup.excludeScanGlobs != null ? {excludeScanGlobs: [...snapshot.cleanup.excludeScanGlobs]} : {} - } - }, - async convertContent() { - return '' - } - } -} - -async function createSyntheticCleanContext(snapshot: NativeCleanupSnapshot): Promise { - const {mergeConfig} = await import('../src/config') - const workspaceDir = path.resolve(snapshot.workspaceDir) - const cleanupProtectionRules = snapshot.protectedRules.map(rule => ({ - path: rule.path, - protectionMode: rule.protectionMode, - reason: rule.reason, - matcher: rule.matcher ?? 'path' - })) - - if (snapshot.aindexDir != null) { - cleanupProtectionRules.push({ - path: snapshot.aindexDir, - protectionMode: 'direct', - reason: 'resolved aindex root', - matcher: 'path' - }) - } - - return { - logger: createMockLogger(), - fs, - path, - glob, - dryRun: false, - pluginOptions: mergeConfig({ - workspaceDir, - cleanupProtection: { - rules: cleanupProtectionRules - } - }), - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: snapshot.projectRoots.map(projectRoot => ({ - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: path.relative(workspaceDir, projectRoot) || '.', - basePath: workspaceDir, - getDirectoryName: () => path.basename(projectRoot), - getAbsolutePath: () => projectRoot - } - })) - } - } - } as unknown as OutputCleanContext -} - -async function planCleanup(snapshotJson: string): Promise { - const {collectDeletionTargets} = await import('./native-binding/cleanup') - const snapshot = JSON.parse(snapshotJson) as NativeCleanupSnapshot - const outputPlugins = snapshot.pluginSnapshots.map(createSyntheticOutputPlugin) - const cleanCtx = await createSyntheticCleanContext(snapshot) - const result = await collectDeletionTargets(outputPlugins, cleanCtx) - - return JSON.stringify({ - filesToDelete: result.filesToDelete, - dirsToDelete: result.dirsToDelete, - emptyDirsToDelete: result.emptyDirsToDelete, - violations: result.violations, - conflicts: result.conflicts, - excludedScanGlobs: result.excludedScanGlobs - }) -} - -async function runCleanup(snapshotJson: string): Promise { - const {performCleanup} = await import('./native-binding/cleanup') - const snapshot = JSON.parse(snapshotJson) as NativeCleanupSnapshot - const outputPlugins = snapshot.pluginSnapshots.map(createSyntheticOutputPlugin) - const cleanCtx = await createSyntheticCleanContext(snapshot) - const result = await performCleanup(outputPlugins, cleanCtx, createMockLogger()) - - return JSON.stringify({ - deletedFiles: result.deletedFiles, - deletedDirs: result.deletedDirs, - errors: result.errors.map(error => ({ - path: error.path, - kind: error.type, - error: error.error instanceof Error ? error.error.message : String(error.error) - })), - violations: result.violations, - conflicts: result.conflicts, - filesToDelete: [], - dirsToDelete: [], - emptyDirsToDelete: [], - excludedScanGlobs: [] - }) -} - -function resolveEffectiveIncludeSeries(topLevel?: readonly string[], typeSpecific?: readonly string[]): string[] { - if (topLevel == null && typeSpecific == null) return [] - return [...new Set([...topLevel ?? [], ...typeSpecific ?? []])] -} - -function matchesSeries(seriName: string | readonly string[] | null | undefined, effectiveIncludeSeries: readonly string[]): boolean { - if (seriName == null) return true - if (effectiveIncludeSeries.length === 0) return true - if (typeof seriName === 'string') return effectiveIncludeSeries.includes(seriName) - return seriName.some(name => effectiveIncludeSeries.includes(name)) -} - -function resolveSubSeries( - topLevel?: Readonly>, - typeSpecific?: Readonly> -): Record { - if (topLevel == null && typeSpecific == null) return {} - const merged: Record = {} - for (const [key, values] of Object.entries(topLevel ?? {})) merged[key] = [...values] - for (const [key, values] of Object.entries(typeSpecific ?? {})) { - const existingValues = merged[key] ?? [] - merged[key] = Object.hasOwn(merged, key) ? [...new Set([...existingValues, ...values])] : [...values] - } - return merged -} - -globalThis.__TNMSC_TEST_NATIVE_BINDING__ = { - getPlatformFixedDir: deskPaths.getPlatformFixedDir, - ensureDir: deskPaths.ensureDir, - existsSync: deskPaths.existsSync, - deletePathSync: deskPaths.deletePathSync, - writeFileSync: deskPaths.writeFileSync, - readFileSync: deskPaths.readFileSync, - deleteFiles: deskPaths.deleteFiles, - deleteDirectories: deskPaths.deleteDirectories, - deleteEmptyDirectories: deskPaths.deleteEmptyDirectories, - deleteTargets: deskPaths.deleteTargets, - planCleanup, - performCleanup: runCleanup, - resolveEffectiveIncludeSeries, - matchesSeries, - resolveSubSeries -} From bb462fe2256def0707f42591b97964ce3d58904a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:31:45 +0800 Subject: [PATCH 09/27] feat: add native binding cleanup functionality with support for file and directory deletion --- sdk/test/native-binding/cleanup.ts | 500 ++++++++++++++++++++++++++ sdk/test/native-binding/desk-paths.ts | 264 ++++++++++++++ sdk/test/setup-native-binding.ts | 215 +++++++++++ 3 files changed, 979 insertions(+) create mode 100644 sdk/test/native-binding/cleanup.ts create mode 100644 sdk/test/native-binding/desk-paths.ts create mode 100644 sdk/test/setup-native-binding.ts diff --git a/sdk/test/native-binding/cleanup.ts b/sdk/test/native-binding/cleanup.ts new file mode 100644 index 00000000..d1320e98 --- /dev/null +++ b/sdk/test/native-binding/cleanup.ts @@ -0,0 +1,500 @@ +import type { + ILogger, + OutputCleanContext, + OutputCleanupDeclarations, + OutputCleanupPathDeclaration, + OutputFileDeclaration, + OutputPlugin, + PluginOptions +} from '../../src/plugins/plugin-core' +import type {ProtectedPathRule, ProtectionMode, ProtectionRuleMatcher} from '../../src/ProtectedDeletionGuard' +import type {DeletionError} from './desk-paths' +import * as fs from 'node:fs' +import * as path from 'node:path' +import glob from 'fast-glob' +import {compactDeletionTargets} from '../../src/cleanup/delete-targets' +import {planWorkspaceEmptyDirectoryCleanup} from '../../src/cleanup/empty-directories' +import {buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines} from '../../src/diagnostics' +import {collectAllPluginOutputs} from '../../src/plugins/plugin-core' +import { + buildComparisonKeys, + collectConfiguredAindexInputRules, + collectProjectRoots, + collectProtectedInputSourceRules, + createProtectedDeletionGuard, + logProtectedDeletionGuardError, + partitionDeletionTargets, + resolveAbsolutePath +} from '../../src/ProtectedDeletionGuard' +import {deleteEmptyDirectories, deleteTargets as deskDeleteTargets} from './desk-paths' + +/** + * Result of cleanup operation + */ +export interface CleanupResult { + readonly deletedFiles: number + readonly deletedDirs: number + readonly errors: readonly CleanupError[] + readonly violations: readonly import('../../src/ProtectedDeletionGuard').ProtectedPathViolation[] + readonly conflicts: readonly CleanupProtectionConflict[] + readonly message?: string +} + +/** + * Error during cleanup operation + */ +export interface CleanupError { + readonly path: string + readonly type: 'file' | 'directory' + readonly error: unknown +} + +export interface CleanupProtectionConflict { + readonly outputPath: string + readonly outputPlugin: string + readonly protectedPath: string + readonly protectionMode: ProtectionMode + readonly protectedBy: string + readonly reason: string +} + +export class CleanupProtectionConflictError extends Error { + readonly conflicts: readonly CleanupProtectionConflict[] + + constructor(conflicts: readonly CleanupProtectionConflict[]) { + super(buildCleanupProtectionConflictMessage(conflicts)) + this.name = 'CleanupProtectionConflictError' + this.conflicts = conflicts + } +} + +interface CleanupTargetCollections { + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly emptyDirsToDelete: string[] + readonly violations: readonly import('../../src/ProtectedDeletionGuard').ProtectedPathViolation[] + readonly conflicts: readonly CleanupProtectionConflict[] + readonly excludedScanGlobs: string[] +} + +const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS = ['**/node_modules/**', '**/.git/**', '**/.turbo/**', '**/.pnpm-store/**', '**/.yarn/**', '**/.next/**'] as const + +function normalizeGlobPattern(pattern: string): string { + return resolveAbsolutePath(pattern).replaceAll('\\', '/') +} + +function expandCleanupGlob(pattern: string, ignoreGlobs: readonly string[]): readonly string[] { + const normalizedPattern = normalizeGlobPattern(pattern) + return glob.sync(normalizedPattern, { + onlyFiles: false, + dot: true, + absolute: true, + followSymbolicLinks: false, + ignore: [...ignoreGlobs] + }) +} + +function shouldExcludeCleanupMatch(matchedPath: string, target: OutputCleanupPathDeclaration): boolean { + if (target.excludeBasenames == null || target.excludeBasenames.length === 0) return false + const basename = path.basename(matchedPath) + return target.excludeBasenames.includes(basename) +} + +async function collectPluginCleanupDeclarations(plugin: OutputPlugin, cleanCtx: OutputCleanContext): Promise { + if (plugin.declareCleanupPaths == null) return {} + return plugin.declareCleanupPaths({...cleanCtx, dryRun: true}) +} + +async function collectPluginCleanupSnapshot( + plugin: OutputPlugin, + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise<{ + readonly plugin: OutputPlugin + readonly outputs: Awaited> + readonly cleanup: OutputCleanupDeclarations +}> { + const existingOutputDeclarations = predeclaredOutputs?.get(plugin) + const [outputs, cleanup] = await Promise.all([ + existingOutputDeclarations != null ? Promise.resolve(existingOutputDeclarations) : plugin.declareOutputFiles({...cleanCtx, dryRun: true}), + collectPluginCleanupDeclarations(plugin, cleanCtx) + ]) + + return {plugin, outputs, cleanup} +} + +function buildCleanupProtectionConflictMessage(conflicts: readonly CleanupProtectionConflict[]): string { + const pathList = conflicts.map(conflict => conflict.outputPath).join(', ') + return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}` +} + +function detectCleanupProtectionConflicts( + outputPathOwners: ReadonlyMap, + guard: ReturnType +): CleanupProtectionConflict[] { + const conflicts: CleanupProtectionConflict[] = [] + + for (const [outputPath, outputPlugins] of outputPathOwners.entries()) { + const outputKeys = new Set(buildComparisonKeys(outputPath)) + + for (const rule of guard.compiledRules) { + const isExactMatch = rule.comparisonKeys.some(ruleKey => outputKeys.has(ruleKey)) + if (!isExactMatch) continue + + for (const outputPlugin of outputPlugins) { + conflicts.push({ + outputPath, + outputPlugin, + protectedPath: rule.path, + protectionMode: rule.protectionMode, + protectedBy: rule.source, + reason: rule.reason + }) + } + } + } + + return conflicts.sort((a, b) => { + const pathDiff = a.outputPath.localeCompare(b.outputPath) + if (pathDiff !== 0) return pathDiff + return a.protectedPath.localeCompare(b.protectedPath) + }) +} + +function logCleanupProtectionConflicts(logger: ILogger, conflicts: readonly CleanupProtectionConflict[]): void { + const firstConflict = conflicts[0] + + logger.error( + buildDiagnostic({ + code: 'CLEANUP_PROTECTION_CONFLICT_DETECTED', + title: 'Cleanup output paths conflict with protected inputs', + rootCause: diagnosticLines( + `tnmsc found ${conflicts.length} output path(s) that also match protected cleanup rules.`, + firstConflict == null + ? 'No conflict details were captured.' + : `Example conflict: "${firstConflict.outputPath}" is protected by "${firstConflict.protectedPath}".` + ), + exactFix: diagnosticLines('Separate generated output paths from protected source or reserved workspace paths before running cleanup again.'), + possibleFixes: [ + diagnosticLines('Update cleanup protect declarations so they do not overlap generated outputs.'), + diagnosticLines('Move the conflicting output target to a generated-only directory.') + ], + details: { + count: conflicts.length, + conflicts: conflicts.map(conflict => ({ + outputPath: conflict.outputPath, + outputPlugin: conflict.outputPlugin, + protectedPath: conflict.protectedPath, + protectionMode: conflict.protectionMode, + protectedBy: conflict.protectedBy, + reason: conflict.reason + })) + } + }) + ) +} + +/** + * Collect deletion targets from enabled output plugins. + */ +async function collectCleanupTargets( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise { + const deleteFiles = new Set() + const deleteDirs = new Set() + const protectedRules = new Map() + const excludeScanGlobSet = new Set(DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS) + const outputPathOwners = new Map() + + const pluginSnapshots = await Promise.all(outputPlugins.map(async plugin => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs))) + + const addDeletePath = (rawPath: string, kind: 'file' | 'directory'): void => { + if (kind === 'directory') deleteDirs.add(resolveAbsolutePath(rawPath)) + else deleteFiles.add(resolveAbsolutePath(rawPath)) + } + + const addProtectRule = (rawPath: string, protectionMode: ProtectionMode, reason: string, source: string, matcher: ProtectionRuleMatcher = 'path'): void => { + const resolvedPath = resolveAbsolutePath(rawPath) + protectedRules.set(`${matcher}:${protectionMode}:${resolvedPath}`, { + path: resolvedPath, + protectionMode, + reason, + source, + matcher + }) + } + + const defaultProtectionModeForTarget = (target: OutputCleanupPathDeclaration): ProtectionMode => { + if (target.protectionMode != null) return target.protectionMode + return target.kind === 'file' ? 'direct' : 'recursive' + } + + for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) { + addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source) + } + if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) { + for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, { + workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path + })) { + addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source, rule.matcher) + } + } + + for (const rule of cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []) { + addProtectRule( + rule.path, + rule.protectionMode, + rule.reason ?? 'configured cleanup protection rule', + 'configured-cleanup-protection', + rule.matcher ?? 'path' + ) + } + + for (const snapshot of pluginSnapshots) { + for (const declaration of snapshot.outputs) { + const resolvedOutputPath = resolveAbsolutePath(declaration.path) + addDeletePath(resolvedOutputPath, 'file') + const existingOwners = outputPathOwners.get(resolvedOutputPath) + if (existingOwners == null) outputPathOwners.set(resolvedOutputPath, [snapshot.plugin.name]) + else if (!existingOwners.includes(snapshot.plugin.name)) existingOwners.push(snapshot.plugin.name) + } + for (const ignoreGlob of snapshot.cleanup.excludeScanGlobs ?? []) excludeScanGlobSet.add(normalizeGlobPattern(ignoreGlob)) + } + + const excludeScanGlobs = [...excludeScanGlobSet] + + const resolveDeleteGlob = (target: OutputCleanupPathDeclaration): void => { + for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) { + if (shouldExcludeCleanupMatch(matchedPath, target)) continue + + try { + const stat = fs.lstatSync(matchedPath) + if (stat.isDirectory()) addDeletePath(matchedPath, 'directory') + else addDeletePath(matchedPath, 'file') + } catch {} + } + } + + const resolveProtectGlob = (target: OutputCleanupPathDeclaration, pluginName: string): void => { + const protectionMode = defaultProtectionModeForTarget(target) + const reason = target.label != null ? `plugin cleanup protect declaration (${target.label})` : 'plugin cleanup protect declaration' + + for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) { + addProtectRule(matchedPath, protectionMode, reason, `plugin-cleanup-protect:${pluginName}`) + } + } + + for (const {plugin, cleanup} of pluginSnapshots) { + for (const target of cleanup.protect ?? []) { + if (target.kind === 'glob') { + resolveProtectGlob(target, plugin.name) + continue + } + addProtectRule( + target.path, + defaultProtectionModeForTarget(target), + target.label != null ? `plugin cleanup protect declaration (${target.label})` : 'plugin cleanup protect declaration', + `plugin-cleanup-protect:${plugin.name}` + ) + } + + for (const target of cleanup.delete ?? []) { + if (target.kind === 'glob') { + resolveDeleteGlob(target) + continue + } + if (target.kind === 'directory') addDeletePath(target.path, 'directory') + else addDeletePath(target.path, 'file') + } + } + + const guard = createProtectedDeletionGuard({ + workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, + projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext), + rules: [...protectedRules.values()], + ...cleanCtx.collectedOutputContext.aindexDir != null ? {aindexDir: cleanCtx.collectedOutputContext.aindexDir} : {} + }) + const conflicts = detectCleanupProtectionConflicts(outputPathOwners, guard) + if (conflicts.length > 0) throw new CleanupProtectionConflictError(conflicts) + const filePartition = partitionDeletionTargets([...deleteFiles], guard) + const dirPartition = partitionDeletionTargets([...deleteDirs], guard) + + const compactedTargets = compactDeletionTargets(filePartition.safePaths, dirPartition.safePaths) + const emptyDirectoryPlan = planWorkspaceEmptyDirectoryCleanup({ + fs, + path, + workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, + filesToDelete: compactedTargets.files, + dirsToDelete: compactedTargets.dirs + }) + + return { + filesToDelete: compactedTargets.files, + dirsToDelete: compactedTargets.dirs, + emptyDirsToDelete: emptyDirectoryPlan.emptyDirsToDelete, + violations: [...filePartition.violations, ...dirPartition.violations].sort((a, b) => a.targetPath.localeCompare(b.targetPath)), + conflicts: [], + excludedScanGlobs: [...excludeScanGlobSet].sort((a, b) => a.localeCompare(b)) + } +} + +export async function collectDeletionTargets( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise<{ + filesToDelete: string[] + dirsToDelete: string[] + emptyDirsToDelete: string[] + violations: import('../../src/ProtectedDeletionGuard').ProtectedPathViolation[] + conflicts: CleanupProtectionConflict[] + excludedScanGlobs: string[] +}> { + const targets = await collectCleanupTargets(outputPlugins, cleanCtx, predeclaredOutputs) + return { + filesToDelete: targets.filesToDelete, + dirsToDelete: targets.dirsToDelete.sort((a, b) => a.localeCompare(b)), + emptyDirsToDelete: targets.emptyDirsToDelete.sort((a, b) => a.localeCompare(b)), + violations: [...targets.violations], + conflicts: [...targets.conflicts], + excludedScanGlobs: [...targets.excludedScanGlobs] + } +} + +function buildCleanupErrors(logger: ILogger, errors: readonly DeletionError[], type: 'file' | 'directory'): CleanupError[] { + return errors.map(currentError => { + const errorMessage = currentError.error instanceof Error ? currentError.error.message : String(currentError.error) + logger.warn( + buildFileOperationDiagnostic({ + code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED', + title: type === 'file' ? 'Cleanup could not delete a file' : 'Cleanup could not delete a directory', + operation: 'delete', + targetKind: type, + path: currentError.path, + error: errorMessage, + details: { + phase: 'cleanup' + } + }) + ) + + return {path: currentError.path, type, error: currentError.error} + }) +} + +async function executeCleanupTargets( + targets: CleanupTargetCollections, + logger: ILogger +): Promise<{deletedFiles: number, deletedDirs: number, errors: CleanupError[]}> { + logger.debug('cleanup delete execution started', { + filesToDelete: targets.filesToDelete.length, + dirsToDelete: targets.dirsToDelete.length + targets.emptyDirsToDelete.length, + emptyDirsToDelete: targets.emptyDirsToDelete.length + }) + + const result = await deskDeleteTargets({ + files: targets.filesToDelete, + dirs: targets.dirsToDelete + }) + const emptyDirResult = await deleteEmptyDirectories(targets.emptyDirsToDelete) + + const fileErrors = buildCleanupErrors(logger, result.fileErrors, 'file') + const dirErrors = buildCleanupErrors(logger, [...result.dirErrors, ...emptyDirResult.errors], 'directory') + const allErrors = [...fileErrors, ...dirErrors] + + logger.debug('cleanup delete execution complete', { + deletedFiles: result.deletedFiles.length, + deletedDirs: result.deletedDirs.length + emptyDirResult.deletedPaths.length, + errors: allErrors.length + }) + + return { + deletedFiles: result.deletedFiles.length, + deletedDirs: result.deletedDirs.length + emptyDirResult.deletedPaths.length, + errors: allErrors + } +} + +function logCleanupPlanDiagnostics(logger: ILogger, targets: CleanupTargetCollections): void { + logger.debug('cleanup plan built', { + filesToDelete: targets.filesToDelete.length, + dirsToDelete: targets.dirsToDelete.length + targets.emptyDirsToDelete.length, + emptyDirsToDelete: targets.emptyDirsToDelete.length, + violations: targets.violations.length, + conflicts: targets.conflicts.length, + excludedScanGlobs: targets.excludedScanGlobs + }) +} + +/** + * Perform cleanup operation for output plugins. + * This is the main reusable cleanup function that can be called from both + * CleanCommand and ExecuteCommand (for pre-cleanup). + */ +export async function performCleanup( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + logger: ILogger, + predeclaredOutputs?: ReadonlyMap +): Promise { + if (predeclaredOutputs != null) { + const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx, predeclaredOutputs) + logger.debug('Collected outputs for cleanup', { + projectDirs: outputs.projectDirs.length, + projectFiles: outputs.projectFiles.length, + globalDirs: outputs.globalDirs.length, + globalFiles: outputs.globalFiles.length + }) + } + + let targets: CleanupTargetCollections + try { + targets = await collectCleanupTargets(outputPlugins, cleanCtx, predeclaredOutputs) + } catch (error) { + if (error instanceof CleanupProtectionConflictError) { + logCleanupProtectionConflicts(logger, error.conflicts) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: [], + conflicts: error.conflicts, + message: error.message + } + } + throw error + } + const cleanupTargets: CleanupTargetCollections = { + filesToDelete: targets.filesToDelete, + dirsToDelete: targets.dirsToDelete, + emptyDirsToDelete: targets.emptyDirsToDelete, + violations: targets.violations, + conflicts: targets.conflicts, + excludedScanGlobs: targets.excludedScanGlobs + } + logCleanupPlanDiagnostics(logger, cleanupTargets) + + if (cleanupTargets.violations.length > 0) { + logProtectedDeletionGuardError(logger, 'cleanup', cleanupTargets.violations) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: cleanupTargets.violations, + conflicts: [], + message: `Protected deletion guard blocked cleanup for ${cleanupTargets.violations.length} path(s)` + } + } + + const executionResult = await executeCleanupTargets(cleanupTargets, logger) + + return { + deletedFiles: executionResult.deletedFiles, + deletedDirs: executionResult.deletedDirs, + errors: executionResult.errors, + violations: [], + conflicts: [] + } +} diff --git a/sdk/test/native-binding/desk-paths.ts b/sdk/test/native-binding/desk-paths.ts new file mode 100644 index 00000000..243ac6bf --- /dev/null +++ b/sdk/test/native-binding/desk-paths.ts @@ -0,0 +1,264 @@ +import type {Buffer} from 'node:buffer' +import type {LoggerDiagnosticInput} from '../../src/plugins/plugin-core' +import * as fs from 'node:fs' +import path from 'node:path' +import process from 'node:process' +import {buildFileOperationDiagnostic} from '../../src/diagnostics' +import {resolveRuntimeEnvironment, resolveUserPath} from '../../src/runtime-environment' + +type PlatformFixedDir = 'win32' | 'darwin' | 'linux' + +function getLinuxDataDir(homeDir: string): string { + const xdgDataHome = process.env['XDG_DATA_HOME'] + if (typeof xdgDataHome === 'string' && xdgDataHome.trim().length > 0) return resolveUserPath(xdgDataHome) + return path.join(homeDir, '.local', 'share') +} + +export function getPlatformFixedDir(): string { + const runtimeEnvironment = resolveRuntimeEnvironment() + const platform = (runtimeEnvironment.isWsl ? 'win32' : runtimeEnvironment.platform) as PlatformFixedDir + const homeDir = runtimeEnvironment.effectiveHomeDir + + if (platform === 'win32') return resolveUserPath(process.env['LOCALAPPDATA'] ?? path.join(homeDir, 'AppData', 'Local')) + if (platform === 'darwin') return path.join(homeDir, 'Library', 'Application Support') + if (platform === 'linux') return getLinuxDataDir(homeDir) + + throw new Error(`Unsupported platform: ${process.platform}`) +} + +export function ensureDir(dir: string): void { + fs.mkdirSync(dir, {recursive: true}) +} + +export function existsSync(p: string): boolean { + return fs.existsSync(p) +} + +export function deletePathSync(p: string): void { + if (!fs.existsSync(p)) return + + const stat = fs.lstatSync(p) + if (stat.isSymbolicLink()) { + if (process.platform === 'win32') fs.rmSync(p, {recursive: true, force: true}) + else fs.unlinkSync(p) + } else if (stat.isDirectory()) fs.rmSync(p, {recursive: true, force: true}) + else fs.unlinkSync(p) +} + +export function writeFileSync(filePath: string, data: string | Buffer, encoding: BufferEncoding = 'utf8'): void { + const parentDir = path.dirname(filePath) + ensureDir(parentDir) + if (typeof data === 'string') fs.writeFileSync(filePath, data, encoding) + else fs.writeFileSync(filePath, data) +} + +export function readFileSync(filePath: string, encoding: BufferEncoding = 'utf8'): string { + try { + return fs.readFileSync(filePath, encoding) + } catch (error) { + const msg = error instanceof Error ? error.message : String(error) + throw new Error(`Failed to read file "${filePath}": ${msg}`) + } +} + +export interface DeletionError { + readonly path: string + readonly error: unknown +} + +export interface DeletionResult { + readonly deleted: number + readonly deletedPaths: readonly string[] + readonly errors: readonly DeletionError[] +} + +export interface DeleteTargetsResult { + readonly deletedFiles: readonly string[] + readonly deletedDirs: readonly string[] + readonly fileErrors: readonly DeletionError[] + readonly dirErrors: readonly DeletionError[] +} + +const DELETE_CONCURRENCY = 32 + +async function deletePath(p: string): Promise { + try { + const stat = await fs.promises.lstat(p) + if (stat.isSymbolicLink()) { + await (process.platform === 'win32' ? fs.promises.rm(p, {recursive: true, force: true}) : fs.promises.unlink(p)) + return true + } + + if (stat.isDirectory()) { + await fs.promises.rm(p, {recursive: true, force: true}) + return true + } + + await fs.promises.unlink(p) + return true + } catch (error) { + if ((error as NodeJS.ErrnoException).code === 'ENOENT') return false + throw error + } +} + +async function deleteEmptyDirectory(p: string): Promise { + try { + await fs.promises.rmdir(p) + return true + } catch (error) { + const {code} = error as NodeJS.ErrnoException + if (code === 'ENOENT' || code === 'ENOTEMPTY') return false + throw error + } +} + +async function mapWithConcurrencyLimit(items: readonly T[], concurrency: number, worker: (item: T) => Promise): Promise { + if (items.length === 0) return [] + + const results: TResult[] = [] + let nextIndex = 0 + + const runWorker = async (): Promise => { + while (true) { + const currentIndex = nextIndex + if (currentIndex >= items.length) return + + nextIndex += 1 + results[currentIndex] = await worker(items[currentIndex] as T) + } + } + + const workerCount = Math.min(concurrency, items.length) + const workers: Promise[] = [] + for (let index = 0; index < workerCount; index += 1) { + workers.push(runWorker()) + } + await Promise.all(workers) + + return results +} + +async function deletePaths(paths: readonly string[], options?: {readonly sortByDepthDescending?: boolean}): Promise { + const sortedPaths = options?.sortByDepthDescending === true ? [...paths].sort((a, b) => b.length - a.length || b.localeCompare(a)) : [...paths] + + const results = await mapWithConcurrencyLimit(sortedPaths, DELETE_CONCURRENCY, async currentPath => { + try { + const deleted = await deletePath(currentPath) + return {path: currentPath, deleted} + } catch (error) { + return {path: currentPath, error} + } + }) + + const deletedPaths: string[] = [] + const errors: DeletionError[] = [] + + for (const result of results) { + if ('error' in result) { + errors.push({path: result.path, error: result.error}) + continue + } + + if (result.deleted) deletedPaths.push(result.path) + } + + return { + deleted: deletedPaths.length, + deletedPaths, + errors + } +} + +export async function deleteFiles(files: readonly string[]): Promise { + return deletePaths(files) +} + +export async function deleteDirectories(dirs: readonly string[]): Promise { + return deletePaths(dirs, {sortByDepthDescending: true}) +} + +export async function deleteEmptyDirectories(dirs: readonly string[]): Promise { + const sortedPaths = [...dirs].sort((a, b) => b.length - a.length || b.localeCompare(a)) + const deletedPaths: string[] = [] + const errors: DeletionError[] = [] + + for (const currentPath of sortedPaths) { + try { + const deleted = await deleteEmptyDirectory(currentPath) + if (deleted) deletedPaths.push(currentPath) + } catch (error) { + errors.push({path: currentPath, error}) + } + } + + return { + deleted: deletedPaths.length, + deletedPaths, + errors + } +} + +export async function deleteTargets(targets: {readonly files?: readonly string[], readonly dirs?: readonly string[]}): Promise { + const [fileResult, dirResult] = await Promise.all([deleteFiles(targets.files ?? []), deleteDirectories(targets.dirs ?? [])]) + + return { + deletedFiles: fileResult.deletedPaths, + deletedDirs: dirResult.deletedPaths, + fileErrors: fileResult.errors, + dirErrors: dirResult.errors + } +} + +export interface WriteLogger { + readonly trace: (data: object) => void + readonly error: (diagnostic: LoggerDiagnosticInput) => void +} + +export interface SafeWriteOptions { + readonly fullPath: string + readonly content: string | Buffer + readonly type: string + readonly relativePath: string + readonly dryRun: boolean + readonly logger: WriteLogger +} + +export interface SafeWriteResult { + readonly path: string + readonly success: boolean + readonly skipped?: boolean + readonly error?: Error +} + +export function writeFileSafe(options: SafeWriteOptions): SafeWriteResult { + const {fullPath, content, type, relativePath, dryRun, logger} = options + + if (dryRun) { + logger.trace({action: 'dryRun', type, path: fullPath}) + return {path: relativePath, success: true, skipped: false} + } + + try { + writeFileSync(fullPath, content) + logger.trace({action: 'write', type, path: fullPath}) + return {path: relativePath, success: true} + } catch (error) { + const errMsg = error instanceof Error ? error.message : String(error) + logger.error( + buildFileOperationDiagnostic({ + code: 'OUTPUT_FILE_WRITE_FAILED', + title: `Failed to write ${type} output`, + operation: 'write', + targetKind: `${type} output file`, + path: fullPath, + error: errMsg, + details: { + relativePath, + type + } + }) + ) + return {path: relativePath, success: false, error: error as Error} + } +} diff --git a/sdk/test/setup-native-binding.ts b/sdk/test/setup-native-binding.ts new file mode 100644 index 00000000..48fa5e04 --- /dev/null +++ b/sdk/test/setup-native-binding.ts @@ -0,0 +1,215 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../src/plugins/plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import glob from 'fast-glob' +import {FilePathKind, PluginKind} from '../src/plugins/plugin-core/enums' +import * as deskPaths from './native-binding/desk-paths' + +interface NativeCleanupTarget { + readonly path: string + readonly kind: 'file' | 'directory' | 'glob' + readonly excludeBasenames?: readonly string[] + readonly protectionMode?: 'direct' | 'recursive' + readonly scope?: string + readonly label?: string +} + +interface NativeCleanupDeclarations { + readonly delete?: readonly NativeCleanupTarget[] + readonly protect?: readonly NativeCleanupTarget[] + readonly excludeScanGlobs?: readonly string[] +} + +interface NativePluginCleanupSnapshot { + readonly pluginName: string + readonly outputs: readonly string[] + readonly cleanup: NativeCleanupDeclarations +} + +interface NativeProtectedRule { + readonly path: string + readonly protectionMode: 'direct' | 'recursive' + readonly reason: string + readonly source: string + readonly matcher?: 'path' | 'glob' +} + +interface NativeCleanupSnapshot { + readonly workspaceDir: string + readonly aindexDir?: string + readonly projectRoots: readonly string[] + readonly protectedRules: readonly NativeProtectedRule[] + readonly pluginSnapshots: readonly NativePluginCleanupSnapshot[] +} + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createSyntheticOutputPlugin(snapshot: NativePluginCleanupSnapshot): OutputPlugin { + return { + type: PluginKind.Output, + name: snapshot.pluginName, + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return snapshot.outputs.map(output => ({path: output, source: {}})) + }, + async declareCleanupPaths(): Promise { + return { + ...snapshot.cleanup.delete != null ? {delete: [...snapshot.cleanup.delete] as OutputCleanupDeclarations['delete']} : {}, + ...snapshot.cleanup.protect != null ? {protect: [...snapshot.cleanup.protect] as OutputCleanupDeclarations['protect']} : {}, + ...snapshot.cleanup.excludeScanGlobs != null ? {excludeScanGlobs: [...snapshot.cleanup.excludeScanGlobs]} : {} + } + }, + async convertContent() { + return '' + } + } +} + +async function createSyntheticCleanContext(snapshot: NativeCleanupSnapshot): Promise { + const {mergeConfig} = await import('../src/config') + const workspaceDir = path.resolve(snapshot.workspaceDir) + const cleanupProtectionRules = snapshot.protectedRules.map(rule => ({ + path: rule.path, + protectionMode: rule.protectionMode, + reason: rule.reason, + matcher: rule.matcher ?? 'path' + })) + + if (snapshot.aindexDir != null) { + cleanupProtectionRules.push({ + path: snapshot.aindexDir, + protectionMode: 'direct', + reason: 'resolved aindex root', + matcher: 'path' + }) + } + + return { + logger: createMockLogger(), + fs, + path, + glob, + dryRun: false, + pluginOptions: mergeConfig({ + workspaceDir, + cleanupProtection: { + rules: cleanupProtectionRules + } + }), + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: snapshot.projectRoots.map(projectRoot => ({ + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: path.relative(workspaceDir, projectRoot) || '.', + basePath: workspaceDir, + getDirectoryName: () => path.basename(projectRoot), + getAbsolutePath: () => projectRoot + } + })) + } + } + } as unknown as OutputCleanContext +} + +async function planCleanup(snapshotJson: string): Promise { + const {collectDeletionTargets} = await import('./native-binding/cleanup') + const snapshot = JSON.parse(snapshotJson) as NativeCleanupSnapshot + const outputPlugins = snapshot.pluginSnapshots.map(createSyntheticOutputPlugin) + const cleanCtx = await createSyntheticCleanContext(snapshot) + const result = await collectDeletionTargets(outputPlugins, cleanCtx) + + return JSON.stringify({ + filesToDelete: result.filesToDelete, + dirsToDelete: result.dirsToDelete, + emptyDirsToDelete: result.emptyDirsToDelete, + violations: result.violations, + conflicts: result.conflicts, + excludedScanGlobs: result.excludedScanGlobs + }) +} + +async function runCleanup(snapshotJson: string): Promise { + const {performCleanup} = await import('./native-binding/cleanup') + const snapshot = JSON.parse(snapshotJson) as NativeCleanupSnapshot + const outputPlugins = snapshot.pluginSnapshots.map(createSyntheticOutputPlugin) + const cleanCtx = await createSyntheticCleanContext(snapshot) + const result = await performCleanup(outputPlugins, cleanCtx, createMockLogger()) + + return JSON.stringify({ + deletedFiles: result.deletedFiles, + deletedDirs: result.deletedDirs, + errors: result.errors.map(error => ({ + path: error.path, + kind: error.type, + error: error.error instanceof Error ? error.error.message : String(error.error) + })), + violations: result.violations, + conflicts: result.conflicts, + filesToDelete: [], + dirsToDelete: [], + emptyDirsToDelete: [], + excludedScanGlobs: [] + }) +} + +function resolveEffectiveIncludeSeries(topLevel?: readonly string[], typeSpecific?: readonly string[]): string[] { + if (topLevel == null && typeSpecific == null) return [] + return [...new Set([...topLevel ?? [], ...typeSpecific ?? []])] +} + +function matchesSeries(seriName: string | readonly string[] | null | undefined, effectiveIncludeSeries: readonly string[]): boolean { + if (seriName == null) return true + if (effectiveIncludeSeries.length === 0) return true + if (typeof seriName === 'string') return effectiveIncludeSeries.includes(seriName) + return seriName.some(name => effectiveIncludeSeries.includes(name)) +} + +function resolveSubSeries( + topLevel?: Readonly>, + typeSpecific?: Readonly> +): Record { + if (topLevel == null && typeSpecific == null) return {} + const merged: Record = {} + for (const [key, values] of Object.entries(topLevel ?? {})) merged[key] = [...values] + for (const [key, values] of Object.entries(typeSpecific ?? {})) { + const existingValues = merged[key] ?? [] + merged[key] = Object.hasOwn(merged, key) ? [...new Set([...existingValues, ...values])] : [...values] + } + return merged +} + +globalThis.__TNMSC_TEST_NATIVE_BINDING__ = { + getPlatformFixedDir: deskPaths.getPlatformFixedDir, + ensureDir: deskPaths.ensureDir, + existsSync: deskPaths.existsSync, + deletePathSync: deskPaths.deletePathSync, + writeFileSync: deskPaths.writeFileSync, + readFileSync: deskPaths.readFileSync, + deleteFiles: deskPaths.deleteFiles, + deleteDirectories: deskPaths.deleteDirectories, + deleteEmptyDirectories: deskPaths.deleteEmptyDirectories, + deleteTargets: deskPaths.deleteTargets, + planCleanup, + performCleanup: runCleanup, + resolveEffectiveIncludeSeries, + matchesSeries, + resolveSubSeries +} From 7352ebc00f5349b8d15f2f1aee7319063119b921 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:31:53 +0800 Subject: [PATCH 10/27] feat: add script to generate JSON schema from TNMSC_JSON_SCHEMA --- sdk/scripts/benchmark-cleanup.ts | 147 ++++++++++++++++++++++++++++ sdk/scripts/cleanup-native-smoke.ts | 145 +++++++++++++++++++++++++++ sdk/scripts/finalize-bundle.ts | 143 +++++++++++++++++++++++++++ sdk/scripts/generate-schema.ts | 5 + 4 files changed, 440 insertions(+) create mode 100644 sdk/scripts/benchmark-cleanup.ts create mode 100644 sdk/scripts/cleanup-native-smoke.ts create mode 100644 sdk/scripts/finalize-bundle.ts create mode 100644 sdk/scripts/generate-schema.ts diff --git a/sdk/scripts/benchmark-cleanup.ts b/sdk/scripts/benchmark-cleanup.ts new file mode 100644 index 00000000..f271a363 --- /dev/null +++ b/sdk/scripts/benchmark-cleanup.ts @@ -0,0 +1,147 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../src/plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {performance} from 'node:perf_hooks' +import glob from 'fast-glob' + +process.env['TNMSC_FORCE_NATIVE_BINDING'] = '1' +delete process.env['VITEST'] +delete process.env['VITEST_WORKER_ID'] + +const cleanupModule = await import('../src/commands/CleanupUtils') +const pluginCore = await import('../src/plugins/plugin-core') + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createCleanContext(workspaceDir: string): OutputCleanContext { + return { + logger: createMockLogger(), + fs, + path, + glob, + collectedOutputContext: { + workspace: { + directory: { + pathKind: pluginCore.FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: Array.from({length: 40}, (_, index) => ({ + dirFromWorkspacePath: { + pathKind: pluginCore.FilePathKind.Relative, + path: `project-${index}`, + basePath: workspaceDir, + getDirectoryName: () => `project-${index}`, + getAbsolutePath: () => path.join(workspaceDir, `project-${index}`) + } + })) + }, + aindexDir: path.join(workspaceDir, 'aindex') + } + } as OutputCleanContext +} + +function createBenchmarkPlugin(workspaceDir: string): OutputPlugin { + return { + type: pluginCore.PluginKind.Output, + name: 'BenchmarkOutputPlugin', + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return Array.from({length: 40}, (_, projectIndex) => ([ + {path: path.join(workspaceDir, `project-${projectIndex}`, 'AGENTS.md'), source: {}}, + {path: path.join(workspaceDir, `project-${projectIndex}`, 'commands', 'AGENTS.md'), source: {}} + ])).flat() + }, + async declareCleanupPaths(): Promise { + return { + delete: [{ + kind: 'glob', + path: path.join(workspaceDir, '.codex', 'skills', '*'), + excludeBasenames: ['.system'] + }, { + kind: 'glob', + path: path.join(workspaceDir, '.claude', '**', 'CLAUDE.md') + }], + protect: [{ + kind: 'directory', + path: path.join(workspaceDir, '.codex', 'skills', '.system'), + protectionMode: 'recursive' + }] + } + }, + async convertContent() { + return 'benchmark' + } + } +} + +async function measure(label: string, iterations: number, run: () => Promise): Promise { + const start = performance.now() + for (let index = 0; index < iterations; index += 1) { + await run() + } + const total = performance.now() - start + const average = total / iterations + process.stdout.write(`${label}: total=${total.toFixed(2)}ms avg=${average.toFixed(2)}ms\n`) + return average +} + +async function main(): Promise { + if (!cleanupModule.hasNativeCleanupBinding()) { + throw new Error('Native cleanup binding is unavailable. Build the sdk NAPI module first.') + } + + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-benchmark-cleanup-')) + const workspaceDir = path.join(tempDir, 'workspace') + + try { + for (let projectIndex = 0; projectIndex < 40; projectIndex += 1) { + const rootFile = path.join(workspaceDir, `project-${projectIndex}`, 'AGENTS.md') + const childFile = path.join(workspaceDir, `project-${projectIndex}`, 'commands', 'AGENTS.md') + fs.mkdirSync(path.dirname(childFile), {recursive: true}) + fs.writeFileSync(rootFile, '# root', 'utf8') + fs.writeFileSync(childFile, '# child', 'utf8') + } + + const skillsDir = path.join(workspaceDir, '.codex', 'skills') + fs.mkdirSync(path.join(skillsDir, '.system'), {recursive: true}) + for (let index = 0; index < 80; index += 1) { + const skillDir = path.join(skillsDir, `legacy-${index}`) + fs.mkdirSync(skillDir, {recursive: true}) + fs.writeFileSync(path.join(skillDir, 'SKILL.md'), '# stale', 'utf8') + } + + for (let index = 0; index < 40; index += 1) { + const claudeFile = path.join(workspaceDir, '.claude', `project-${index}`, 'CLAUDE.md') + fs.mkdirSync(path.dirname(claudeFile), {recursive: true}) + fs.writeFileSync(claudeFile, '# claude', 'utf8') + } + + const plugin = createBenchmarkPlugin(workspaceDir) + const cleanCtx = createCleanContext(workspaceDir) + const iterations = 25 + + process.stdout.write(`cleanup benchmark iterations=${iterations}\n`) + await measure('native-plan', iterations, async () => { + await cleanupModule.collectDeletionTargets([plugin], cleanCtx) + }) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } +} + +await main() diff --git a/sdk/scripts/cleanup-native-smoke.ts b/sdk/scripts/cleanup-native-smoke.ts new file mode 100644 index 00000000..13f5a3bb --- /dev/null +++ b/sdk/scripts/cleanup-native-smoke.ts @@ -0,0 +1,145 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../src/plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' + +process.env['TNMSC_FORCE_NATIVE_BINDING'] = '1' +delete process.env['VITEST'] +delete process.env['VITEST_WORKER_ID'] + +const cleanupModule = await import('../src/commands/CleanupUtils') +const pluginCore = await import('../src/plugins/plugin-core') + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createCleanContext(workspaceDir: string): OutputCleanContext { + return { + logger: createMockLogger(), + fs, + path, + glob, + collectedOutputContext: { + workspace: { + directory: { + pathKind: pluginCore.FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [{ + dirFromWorkspacePath: { + pathKind: pluginCore.FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + } + }] + }, + aindexDir: path.join(workspaceDir, 'aindex') + } + } as OutputCleanContext +} + +function createSmokePlugin(workspaceDir: string): OutputPlugin { + return { + type: pluginCore.PluginKind.Output, + name: 'SmokeOutputPlugin', + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return [ + {path: path.join(workspaceDir, 'project-a', 'AGENTS.md'), source: {}}, + {path: path.join(workspaceDir, 'project-a', 'commands', 'AGENTS.md'), source: {}} + ] + }, + async declareCleanupPaths(): Promise { + return { + delete: [{ + kind: 'glob', + path: path.join(workspaceDir, '.codex', 'skills', '*'), + excludeBasenames: ['.system'] + }] + } + }, + async convertContent() { + return 'smoke' + } + } +} + +async function main(): Promise { + if (!cleanupModule.hasNativeCleanupBinding()) { + throw new Error('Native cleanup binding is unavailable. Build the sdk NAPI module first.') + } + + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-native-cleanup-smoke-')) + const workspaceDir = path.join(tempDir, 'workspace') + const legacySkillDir = path.join(workspaceDir, '.codex', 'skills', 'legacy') + const preservedSkillDir = path.join(workspaceDir, '.codex', 'skills', '.system') + const rootOutput = path.join(workspaceDir, 'project-a', 'AGENTS.md') + const childOutput = path.join(workspaceDir, 'project-a', 'commands', 'AGENTS.md') + + fs.mkdirSync(path.dirname(rootOutput), {recursive: true}) + fs.mkdirSync(path.dirname(childOutput), {recursive: true}) + fs.mkdirSync(legacySkillDir, {recursive: true}) + fs.mkdirSync(preservedSkillDir, {recursive: true}) + fs.writeFileSync(rootOutput, '# root', 'utf8') + fs.writeFileSync(childOutput, '# child', 'utf8') + fs.writeFileSync(path.join(legacySkillDir, 'SKILL.md'), '# stale', 'utf8') + fs.writeFileSync(path.join(preservedSkillDir, 'SKILL.md'), '# keep', 'utf8') + + try { + const plugin = createSmokePlugin(workspaceDir) + const cleanCtx = createCleanContext(workspaceDir) + + const nativePlan = await cleanupModule.collectDeletionTargets([plugin], cleanCtx) + expectSetEqual(nativePlan.filesToDelete, [rootOutput, childOutput], 'native cleanup plan files') + expectSetEqual(nativePlan.dirsToDelete, [ + legacySkillDir, + path.join(workspaceDir, 'project-a', 'commands'), + path.join(workspaceDir, 'project-a') + ], 'native cleanup plan directories') + if (nativePlan.violations.length > 0 || nativePlan.conflicts.length > 0) { + throw new Error(`Unexpected native cleanup plan: ${JSON.stringify(nativePlan, null, 2)}`) + } + + const result = await cleanupModule.performCleanup([plugin], cleanCtx, createMockLogger()) + if (result.deletedFiles !== 2 || result.deletedDirs !== 3 || result.errors.length > 0) { + throw new Error(`Unexpected native cleanup result: ${JSON.stringify(result, null, 2)}`) + } + + if (fs.existsSync(rootOutput) || fs.existsSync(childOutput) || fs.existsSync(legacySkillDir)) { + throw new Error('Native cleanup did not remove the expected outputs') + } + if (!fs.existsSync(preservedSkillDir)) { + throw new Error('Native cleanup removed the preserved .system skill directory') + } + + process.stdout.write('cleanup-native-smoke: ok\n') + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } +} + +function expectSetEqual(actual: readonly string[], expected: readonly string[], label: string): void { + const actualSorted = [...actual].sort() + const expectedSorted = [...expected].sort() + if (JSON.stringify(actualSorted) !== JSON.stringify(expectedSorted)) { + throw new Error(`Unexpected ${label}: ${JSON.stringify(actualSorted)} !== ${JSON.stringify(expectedSorted)}`) + } +} + +await main() diff --git a/sdk/scripts/finalize-bundle.ts b/sdk/scripts/finalize-bundle.ts new file mode 100644 index 00000000..d53fa142 --- /dev/null +++ b/sdk/scripts/finalize-bundle.ts @@ -0,0 +1,143 @@ +import {spawnSync} from 'node:child_process' +import {copyFileSync, existsSync, mkdtempSync, readdirSync, rmSync, writeFileSync} from 'node:fs' +import {tmpdir} from 'node:os' +import {dirname, join, resolve} from 'node:path' +import {fileURLToPath, pathToFileURL} from 'node:url' + +const scriptDir = dirname(fileURLToPath(import.meta.url)) +const cliDir = resolve(scriptDir, '..') +const distDir = resolve(cliDir, 'dist') +const indexEntryPath = resolve(distDir, 'index.mjs') +const bundledJitiBabelRuntimeSourcePath = resolve(cliDir, 'node_modules', 'jiti', 'dist', 'babel.cjs') +const bundledJitiBabelRuntimeTargetPath = resolve(distDir, 'babel.cjs') + +function getCombinedOutput(stdout?: string | null, stderr?: string | null): string { + return `${stdout ?? ''}${stderr ?? ''}`.trim() +} + +function runNodeProcess( + args: readonly string[], + options?: { + readonly env?: NodeJS.ProcessEnv + } +) { + return spawnSync(process.execPath, [...args], { + cwd: cliDir, + encoding: 'utf8', + ...options?.env != null && {env: options.env} + }) +} + +function assertProcessSucceeded( + result: ReturnType, + lines: readonly string[] +): void { + if (result.error != null) { + throw result.error + } + + if (result.status === 0) { + return + } + + const combinedOutput = getCombinedOutput(result.stdout, result.stderr) + throw new Error([ + ...lines, + combinedOutput.length === 0 ? 'No output captured.' : combinedOutput + ].join('\n')) +} + +function withTempDir(prefix: string, callback: (tempDir: string) => T): T { + const tempDir = mkdtempSync(join(tmpdir(), prefix)) + + try { + return callback(tempDir) + } + finally { + rmSync(tempDir, {recursive: true, force: true}) + } +} + +function ensureIndexBundleExists(): void { + if (existsSync(indexEntryPath)) return + throw new Error(`Expected bundled CLI entry at "${indexEntryPath}" before finalizing bundle assets.`) +} + +function findBundledJitiChunkPath(): string | undefined { + const bundledJitiChunkName = readdirSync(distDir) + .find(fileName => /^jiti-.*\.mjs$/u.test(fileName)) + + return bundledJitiChunkName == null ? void 0 : resolve(distDir, bundledJitiChunkName) +} + +function ensureBundledJitiRuntimeAssets(): string | undefined { + const bundledJitiChunkPath = findBundledJitiChunkPath() + if (bundledJitiChunkPath == null) return void 0 + + if (!existsSync(bundledJitiBabelRuntimeSourcePath)) { + throw new Error( + `Bundled jiti chunk "${bundledJitiChunkPath}" requires "${bundledJitiBabelRuntimeSourcePath}", but it does not exist.` + ) + } + + copyFileSync(bundledJitiBabelRuntimeSourcePath, bundledJitiBabelRuntimeTargetPath) + return bundledJitiChunkPath +} + +function smokeTestBundledJitiTransform(bundledJitiChunkPath: string | undefined): void { + if (bundledJitiChunkPath == null) return + + withTempDir('tnmsc-bundled-jiti-', tempDir => { + const probeModulePath = join(tempDir, 'probe.ts') + const probeRunnerPath = join(tempDir, 'probe-runner.mjs') + + writeFileSync(probeModulePath, 'export default {ok: true}\n', 'utf8') + writeFileSync(probeRunnerPath, [ + "import {pathToFileURL} from 'node:url'", + '', + 'const [, , bundledJitiChunkPathArg, probeModulePathArg] = process.argv', + '', + 'const {createJiti} = await import(pathToFileURL(bundledJitiChunkPathArg).href)', + 'const runtime = createJiti(import.meta.url, {', + ' fsCache: false,', + ' moduleCache: false,', + ' interopDefault: false', + '})', + 'const loaded = await runtime.import(probeModulePathArg)', + '', + 'if (loaded.default?.ok !== true) {', + " throw new Error('Bundled jiti smoke test loaded an unexpected module shape.')", + '}', + '' + ].join('\n'), 'utf8') + + const smokeTest = runNodeProcess([probeRunnerPath, bundledJitiChunkPath, probeModulePath]) + assertProcessSucceeded(smokeTest, [ + `Bundled jiti chunk "${pathToFileURL(bundledJitiChunkPath).href}" failed the transform smoke test.` + ]) + }) +} + +function smokeTestCliEntry(): void { + withTempDir('tnmsc-index-entry-home-', isolatedHomeDir => { + const smokeTest = runNodeProcess([indexEntryPath, '--version'], { + env: { + ...process.env, + HOME: isolatedHomeDir, + USERPROFILE: isolatedHomeDir + } + }) + + assertProcessSucceeded(smokeTest, [ + `Bundled CLI entry "${indexEntryPath}" failed the runtime smoke test.`, + `Exit code: ${smokeTest.status ?? 'unknown'}` + ]) + }) +} + +ensureIndexBundleExists() +const bundledJitiChunkPath = ensureBundledJitiRuntimeAssets() +smokeTestBundledJitiTransform(bundledJitiChunkPath) +smokeTestCliEntry() + +console.log(`Finalized bundled CLI assets for ${indexEntryPath}`) diff --git a/sdk/scripts/generate-schema.ts b/sdk/scripts/generate-schema.ts new file mode 100644 index 00000000..b8c124dc --- /dev/null +++ b/sdk/scripts/generate-schema.ts @@ -0,0 +1,5 @@ +import {writeFileSync} from 'node:fs' +import {TNMSC_JSON_SCHEMA} from '../src/schema.ts' + +writeFileSync('./dist/tnmsc.schema.json', `${JSON.stringify(TNMSC_JSON_SCHEMA, null, 2)}\n`, 'utf8') +console.log('Schema generated successfully!') From f99552029967a3da3097a9a3280037f08fdf78c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:32:21 +0800 Subject: [PATCH 11/27] feat: add WSL mirror sync functionality - Implemented WslMirrorSync to synchronize files between Windows and WSL. - Added support for discovering WSL instances and resolving their home directories. - Introduced error handling for unavailable WSL instances and missing source files. - Created utility functions for managing file paths and handling output from WSL commands. - Enhanced logging for better visibility during the sync process. --- sdk/src/Aindex.ts | 161 ++ sdk/src/ConfigLoader.test.ts | 67 + sdk/src/ConfigLoader.ts | 473 ++++ sdk/src/PluginPipeline.test.ts | 60 + sdk/src/PluginPipeline.ts | 101 + sdk/src/ProtectedDeletionGuard.ts | 612 +++++ sdk/src/aindex-config/AindexProjectConfig.ts | 29 + .../AindexProjectConfigLoader.ts | 88 + sdk/src/aindex-config/index.ts | 2 + sdk/src/aindex-project-series.ts | 72 + sdk/src/bridge/mod.rs | 3 + sdk/src/bridge/node.rs | 555 ++++ sdk/src/cleanup/delete-targets.ts | 71 + sdk/src/cleanup/empty-directories.ts | 114 + sdk/src/cli-runtime.test.ts | 67 + sdk/src/cli-runtime.ts | 106 + sdk/src/commands/CleanCommand.ts | 34 + sdk/src/commands/CleanupUtils.adapter.test.ts | 156 ++ sdk/src/commands/CleanupUtils.test.ts | 782 ++++++ sdk/src/commands/CleanupUtils.ts | 462 ++++ sdk/src/commands/Command.ts | 95 + sdk/src/commands/CommandFactory.ts | 29 + sdk/src/commands/CommandRegistry.ts | 43 + sdk/src/commands/CommandUtils.ts | 70 + sdk/src/commands/ConfigCommand.ts | 237 ++ sdk/src/commands/ConfigShowCommand.ts | 48 + sdk/src/commands/DryRunCleanCommand.ts | 74 + sdk/src/commands/DryRunOutputCommand.ts | 51 + sdk/src/commands/ExecuteCommand.ts | 79 + sdk/src/commands/HelpCommand.ts | 77 + sdk/src/commands/InitCommand.test.ts | 78 + sdk/src/commands/InitCommand.ts | 36 + sdk/src/commands/JsonOutputCommand.ts | 56 + sdk/src/commands/PluginsCommand.ts | 54 + .../ProtectedDeletionCommands.test.ts | 277 ++ sdk/src/commands/SetCommand.ts | 0 sdk/src/commands/UnknownCommand.ts | 34 + sdk/src/commands/VersionCommand.ts | 29 + sdk/src/commands/bridge.rs | 23 + sdk/src/commands/config_cmd.rs | 108 + sdk/src/commands/config_show.rs | 44 + .../commands/factories/CleanCommandFactory.ts | 20 + .../factories/ConfigCommandFactory.ts | 29 + .../factories/DryRunCommandFactory.ts | 19 + .../factories/ExecuteCommandFactory.ts | 20 + .../commands/factories/HelpCommandFactory.ts | 22 + .../commands/factories/InitCommandFactory.ts | 15 + .../factories/PluginsCommandFactory.ts | 19 + .../factories/UnknownCommandFactory.ts | 22 + .../factories/VersionCommandFactory.ts | 22 + sdk/src/commands/help.rs | 26 + sdk/src/commands/mod.rs | 5 + sdk/src/commands/version.rs | 6 + sdk/src/config.outputScopes.test.ts | 45 + sdk/src/config.plugins-fast-path.test.ts | 50 + sdk/src/config.test.ts | 173 ++ sdk/src/config.ts | 475 ++++ sdk/src/core/cleanup.rs | 2309 +++++++++++++++++ sdk/src/core/config/mod.rs | 1513 +++++++++++ sdk/src/core/config/series_filter.rs | 228 ++ sdk/src/core/desk-paths.ts | 179 ++ sdk/src/core/desk_paths.rs | 623 +++++ sdk/src/core/input_plugins.rs | 9 + sdk/src/core/mod.rs | 5 + sdk/src/core/native-binding.ts | 63 + sdk/src/core/plugin_shared.rs | 623 +++++ sdk/src/diagnostic_helpers.rs | 32 + sdk/src/diagnostics.test.ts | 54 + sdk/src/diagnostics.ts | 415 +++ sdk/src/globals.ts | 1 + sdk/src/index.test.ts | 11 + sdk/src/index.ts | 14 + sdk/src/inputs/AbstractInputCapability.ts | 186 ++ sdk/src/inputs/effect-md-cleanup.ts | 166 ++ sdk/src/inputs/effect-orphan-cleanup.test.ts | 249 ++ sdk/src/inputs/effect-orphan-cleanup.ts | 308 +++ sdk/src/inputs/effect-skill-sync.test.ts | 115 + sdk/src/inputs/effect-skill-sync.ts | 181 ++ sdk/src/inputs/index.ts | 59 + .../input-agentskills-export-fallback.test.ts | 80 + sdk/src/inputs/input-agentskills-types.ts | 10 + sdk/src/inputs/input-agentskills.test.ts | 179 ++ sdk/src/inputs/input-agentskills.ts | 836 ++++++ sdk/src/inputs/input-aindex.test.ts | 187 ++ sdk/src/inputs/input-aindex.ts | 270 ++ sdk/src/inputs/input-command.test.ts | 148 ++ sdk/src/inputs/input-command.ts | 152 ++ sdk/src/inputs/input-editorconfig.ts | 23 + sdk/src/inputs/input-git-exclude.ts | 32 + sdk/src/inputs/input-gitignore.ts | 32 + sdk/src/inputs/input-global-memory.ts | 136 + sdk/src/inputs/input-jetbrains-config.ts | 31 + sdk/src/inputs/input-project-prompt.test.ts | 176 ++ sdk/src/inputs/input-project-prompt.ts | 435 ++++ sdk/src/inputs/input-public-config.test.ts | 450 ++++ sdk/src/inputs/input-readme.test.ts | 49 + sdk/src/inputs/input-readme.ts | 270 ++ sdk/src/inputs/input-rule.test.ts | 93 + sdk/src/inputs/input-rule.ts | 103 + sdk/src/inputs/input-shared-ignore.ts | 35 + sdk/src/inputs/input-subagent.test.ts | 224 ++ sdk/src/inputs/input-subagent.ts | 179 ++ sdk/src/inputs/input-vscode-config.ts | 27 + sdk/src/inputs/input-workspace.ts | 28 + sdk/src/inputs/input-zed-config.ts | 23 + sdk/src/inputs/runtime.ts | 172 ++ sdk/src/lib.rs | 546 ++++ sdk/src/pipeline/CliArgumentParser.test.ts | 9 + sdk/src/pipeline/CliArgumentParser.ts | 265 ++ sdk/src/pipeline/ContextMerger.ts | 207 ++ sdk/src/pipeline/DependencyResolver.ts | 136 + sdk/src/pipeline/OutputRuntimeTargets.ts | 57 + sdk/src/plugin-runtime.ts | 128 + sdk/src/plugin.config.ts | 58 + sdk/src/plugins/AbstractOutputPlugin.test.ts | 122 + sdk/src/plugins/AgentsOutputPlugin.test.ts | 124 + sdk/src/plugins/AgentsOutputPlugin.ts | 127 + sdk/src/plugins/ClaudeCodeCLIOutputPlugin.ts | 123 + sdk/src/plugins/CodexCLIOutputPlugin.test.ts | 364 +++ sdk/src/plugins/CodexCLIOutputPlugin.ts | 124 + sdk/src/plugins/CursorOutputPlugin.test.ts | 351 +++ sdk/src/plugins/CursorOutputPlugin.ts | 561 ++++ sdk/src/plugins/DroidCLIOutputPlugin.ts | 56 + sdk/src/plugins/EditorConfigOutputPlugin.ts | 59 + sdk/src/plugins/GeminiCLIOutputPlugin.ts | 57 + .../plugins/GenericSkillsOutputPlugin.test.ts | 192 ++ sdk/src/plugins/GenericSkillsOutputPlugin.ts | 245 ++ sdk/src/plugins/GitExcludeOutputPlugin.ts | 90 + .../JetBrainsAIAssistantCodexOutputPlugin.ts | 366 +++ ...JetBrainsIDECodeStyleConfigOutputPlugin.ts | 68 + .../plugins/OpencodeCLIOutputPlugin.test.ts | 118 + sdk/src/plugins/OpencodeCLIOutputPlugin.ts | 499 ++++ sdk/src/plugins/PromptMarkdownCleanup.test.ts | 259 ++ .../QoderIDEPluginOutputPlugin.test.ts | 396 +++ sdk/src/plugins/QoderIDEPluginOutputPlugin.ts | 419 +++ .../plugins/ReadmeMdConfigFileOutputPlugin.ts | 72 + sdk/src/plugins/TraeCNIDEOutputPlugin.ts | 60 + sdk/src/plugins/TraeIDEOutputPlugin.test.ts | 125 + sdk/src/plugins/TraeIDEOutputPlugin.ts | 295 +++ .../VisualStudioCodeIDEConfigOutputPlugin.ts | 65 + sdk/src/plugins/WarpIDEOutputPlugin.test.ts | 75 + sdk/src/plugins/WarpIDEOutputPlugin.ts | 110 + sdk/src/plugins/WindsurfOutputPlugin.test.ts | 212 ++ sdk/src/plugins/WindsurfOutputPlugin.ts | 278 ++ sdk/src/plugins/WslMirrorDeclarations.test.ts | 25 + sdk/src/plugins/ZedIDEConfigOutputPlugin.ts | 64 + sdk/src/plugins/desk-paths.test.ts | 141 + sdk/src/plugins/desk-paths.ts | 1 + sdk/src/plugins/ide-config-output.test.ts | 238 ++ sdk/src/plugins/plugin-agentskills-compact.ts | 3 + sdk/src/plugins/plugin-agentsmd.ts | 3 + sdk/src/plugins/plugin-claude-code-cli.ts | 3 + sdk/src/plugins/plugin-core.ts | 172 ++ .../AbstractOutputPlugin.frontmatter.test.ts | 204 ++ .../AbstractOutputPlugin.subagents.test.ts | 114 + .../plugin-core/AbstractOutputPlugin.ts | 1424 ++++++++++ sdk/src/plugins/plugin-core/AbstractPlugin.ts | 26 + .../plugin-core/AindexConfigDefaults.ts | 123 + sdk/src/plugins/plugin-core/AindexTypes.ts | 367 +++ .../plugins/plugin-core/ConfigTypes.schema.ts | 188 ++ .../plugin-core/DistPromptGuards.test.ts | 22 + .../plugins/plugin-core/DistPromptGuards.ts | 68 + .../plugin-core/ExportMetadataTypes.ts | 278 ++ .../plugin-core/GlobalScopeCollector.ts | 231 ++ sdk/src/plugins/plugin-core/InputTypes.ts | 418 +++ .../plugin-core/LocalizedPromptReader.ts | 736 ++++++ .../plugins/plugin-core/McpConfigManager.ts | 251 ++ sdk/src/plugins/plugin-core/OutputTypes.ts | 145 ++ .../plugin-core/PromptArtifactCache.test.ts | 203 ++ .../plugin-core/PromptArtifactCache.ts | 317 +++ .../PromptCompilerDiagnostics.test.ts | 47 + .../plugin-core/PromptCompilerDiagnostics.ts | 65 + sdk/src/plugins/plugin-core/PromptIdentity.ts | 59 + sdk/src/plugins/plugin-core/PromptTypes.ts | 184 ++ sdk/src/plugins/plugin-core/RegistryWriter.ts | 179 ++ sdk/src/plugins/plugin-core/constants.ts | 113 + sdk/src/plugins/plugin-core/enums.ts | 53 + sdk/src/plugins/plugin-core/filters.ts | 261 ++ .../plugin.outputScopes.validation.test.ts | 182 ++ sdk/src/plugins/plugin-core/plugin.ts | 541 ++++ .../plugins/plugin-core/scopePolicy.test.ts | 50 + sdk/src/plugins/plugin-core/scopePolicy.ts | 73 + sdk/src/plugins/plugin-core/types.ts | 39 + sdk/src/plugins/plugin-cursor.ts | 3 + sdk/src/plugins/plugin-droid-cli.ts | 3 + sdk/src/plugins/plugin-editorconfig.ts | 3 + sdk/src/plugins/plugin-gemini-cli.ts | 3 + sdk/src/plugins/plugin-git-exclude.ts | 3 + sdk/src/plugins/plugin-jetbrains-ai-codex.ts | 3 + sdk/src/plugins/plugin-jetbrains-codestyle.ts | 3 + sdk/src/plugins/plugin-openai-codex-cli.ts | 3 + sdk/src/plugins/plugin-opencode-cli.ts | 3 + sdk/src/plugins/plugin-qoder-ide.ts | 3 + sdk/src/plugins/plugin-readme.ts | 3 + sdk/src/plugins/plugin-trae-cn-ide.ts | 3 + sdk/src/plugins/plugin-trae-ide.ts | 3 + sdk/src/plugins/plugin-vscode.ts | 3 + sdk/src/plugins/plugin-warp-ide.ts | 3 + sdk/src/plugins/plugin-windsurf.ts | 3 + sdk/src/plugins/plugin-zed.ts | 3 + sdk/src/prompts.test.ts | 367 +++ sdk/src/prompts.ts | 804 ++++++ sdk/src/public-config-paths.ts | 208 ++ sdk/src/runtime-environment.test.ts | 149 ++ sdk/src/runtime-environment.ts | 361 +++ sdk/src/schema.ts | 14 + sdk/src/script-runtime-worker.ts | 19 + sdk/src/wsl-mirror-sync.test.ts | 588 +++++ sdk/src/wsl-mirror-sync.ts | 656 +++++ 209 files changed, 36494 insertions(+) create mode 100644 sdk/src/Aindex.ts create mode 100644 sdk/src/ConfigLoader.test.ts create mode 100644 sdk/src/ConfigLoader.ts create mode 100644 sdk/src/PluginPipeline.test.ts create mode 100644 sdk/src/PluginPipeline.ts create mode 100644 sdk/src/ProtectedDeletionGuard.ts create mode 100644 sdk/src/aindex-config/AindexProjectConfig.ts create mode 100644 sdk/src/aindex-config/AindexProjectConfigLoader.ts create mode 100644 sdk/src/aindex-config/index.ts create mode 100644 sdk/src/aindex-project-series.ts create mode 100644 sdk/src/bridge/mod.rs create mode 100644 sdk/src/bridge/node.rs create mode 100644 sdk/src/cleanup/delete-targets.ts create mode 100644 sdk/src/cleanup/empty-directories.ts create mode 100644 sdk/src/cli-runtime.test.ts create mode 100644 sdk/src/cli-runtime.ts create mode 100644 sdk/src/commands/CleanCommand.ts create mode 100644 sdk/src/commands/CleanupUtils.adapter.test.ts create mode 100644 sdk/src/commands/CleanupUtils.test.ts create mode 100644 sdk/src/commands/CleanupUtils.ts create mode 100644 sdk/src/commands/Command.ts create mode 100644 sdk/src/commands/CommandFactory.ts create mode 100644 sdk/src/commands/CommandRegistry.ts create mode 100644 sdk/src/commands/CommandUtils.ts create mode 100644 sdk/src/commands/ConfigCommand.ts create mode 100644 sdk/src/commands/ConfigShowCommand.ts create mode 100644 sdk/src/commands/DryRunCleanCommand.ts create mode 100644 sdk/src/commands/DryRunOutputCommand.ts create mode 100644 sdk/src/commands/ExecuteCommand.ts create mode 100644 sdk/src/commands/HelpCommand.ts create mode 100644 sdk/src/commands/InitCommand.test.ts create mode 100644 sdk/src/commands/InitCommand.ts create mode 100644 sdk/src/commands/JsonOutputCommand.ts create mode 100644 sdk/src/commands/PluginsCommand.ts create mode 100644 sdk/src/commands/ProtectedDeletionCommands.test.ts create mode 100644 sdk/src/commands/SetCommand.ts create mode 100644 sdk/src/commands/UnknownCommand.ts create mode 100644 sdk/src/commands/VersionCommand.ts create mode 100644 sdk/src/commands/bridge.rs create mode 100644 sdk/src/commands/config_cmd.rs create mode 100644 sdk/src/commands/config_show.rs create mode 100644 sdk/src/commands/factories/CleanCommandFactory.ts create mode 100644 sdk/src/commands/factories/ConfigCommandFactory.ts create mode 100644 sdk/src/commands/factories/DryRunCommandFactory.ts create mode 100644 sdk/src/commands/factories/ExecuteCommandFactory.ts create mode 100644 sdk/src/commands/factories/HelpCommandFactory.ts create mode 100644 sdk/src/commands/factories/InitCommandFactory.ts create mode 100644 sdk/src/commands/factories/PluginsCommandFactory.ts create mode 100644 sdk/src/commands/factories/UnknownCommandFactory.ts create mode 100644 sdk/src/commands/factories/VersionCommandFactory.ts create mode 100644 sdk/src/commands/help.rs create mode 100644 sdk/src/commands/mod.rs create mode 100644 sdk/src/commands/version.rs create mode 100644 sdk/src/config.outputScopes.test.ts create mode 100644 sdk/src/config.plugins-fast-path.test.ts create mode 100644 sdk/src/config.test.ts create mode 100644 sdk/src/config.ts create mode 100644 sdk/src/core/cleanup.rs create mode 100644 sdk/src/core/config/mod.rs create mode 100644 sdk/src/core/config/series_filter.rs create mode 100644 sdk/src/core/desk-paths.ts create mode 100644 sdk/src/core/desk_paths.rs create mode 100644 sdk/src/core/input_plugins.rs create mode 100644 sdk/src/core/mod.rs create mode 100644 sdk/src/core/native-binding.ts create mode 100644 sdk/src/core/plugin_shared.rs create mode 100644 sdk/src/diagnostic_helpers.rs create mode 100644 sdk/src/diagnostics.test.ts create mode 100644 sdk/src/diagnostics.ts create mode 100644 sdk/src/globals.ts create mode 100644 sdk/src/index.test.ts create mode 100644 sdk/src/index.ts create mode 100644 sdk/src/inputs/AbstractInputCapability.ts create mode 100644 sdk/src/inputs/effect-md-cleanup.ts create mode 100644 sdk/src/inputs/effect-orphan-cleanup.test.ts create mode 100644 sdk/src/inputs/effect-orphan-cleanup.ts create mode 100644 sdk/src/inputs/effect-skill-sync.test.ts create mode 100644 sdk/src/inputs/effect-skill-sync.ts create mode 100644 sdk/src/inputs/index.ts create mode 100644 sdk/src/inputs/input-agentskills-export-fallback.test.ts create mode 100644 sdk/src/inputs/input-agentskills-types.ts create mode 100644 sdk/src/inputs/input-agentskills.test.ts create mode 100644 sdk/src/inputs/input-agentskills.ts create mode 100644 sdk/src/inputs/input-aindex.test.ts create mode 100644 sdk/src/inputs/input-aindex.ts create mode 100644 sdk/src/inputs/input-command.test.ts create mode 100644 sdk/src/inputs/input-command.ts create mode 100644 sdk/src/inputs/input-editorconfig.ts create mode 100644 sdk/src/inputs/input-git-exclude.ts create mode 100644 sdk/src/inputs/input-gitignore.ts create mode 100644 sdk/src/inputs/input-global-memory.ts create mode 100644 sdk/src/inputs/input-jetbrains-config.ts create mode 100644 sdk/src/inputs/input-project-prompt.test.ts create mode 100644 sdk/src/inputs/input-project-prompt.ts create mode 100644 sdk/src/inputs/input-public-config.test.ts create mode 100644 sdk/src/inputs/input-readme.test.ts create mode 100644 sdk/src/inputs/input-readme.ts create mode 100644 sdk/src/inputs/input-rule.test.ts create mode 100644 sdk/src/inputs/input-rule.ts create mode 100644 sdk/src/inputs/input-shared-ignore.ts create mode 100644 sdk/src/inputs/input-subagent.test.ts create mode 100644 sdk/src/inputs/input-subagent.ts create mode 100644 sdk/src/inputs/input-vscode-config.ts create mode 100644 sdk/src/inputs/input-workspace.ts create mode 100644 sdk/src/inputs/input-zed-config.ts create mode 100644 sdk/src/inputs/runtime.ts create mode 100644 sdk/src/lib.rs create mode 100644 sdk/src/pipeline/CliArgumentParser.test.ts create mode 100644 sdk/src/pipeline/CliArgumentParser.ts create mode 100644 sdk/src/pipeline/ContextMerger.ts create mode 100644 sdk/src/pipeline/DependencyResolver.ts create mode 100644 sdk/src/pipeline/OutputRuntimeTargets.ts create mode 100644 sdk/src/plugin-runtime.ts create mode 100644 sdk/src/plugin.config.ts create mode 100644 sdk/src/plugins/AbstractOutputPlugin.test.ts create mode 100644 sdk/src/plugins/AgentsOutputPlugin.test.ts create mode 100644 sdk/src/plugins/AgentsOutputPlugin.ts create mode 100644 sdk/src/plugins/ClaudeCodeCLIOutputPlugin.ts create mode 100644 sdk/src/plugins/CodexCLIOutputPlugin.test.ts create mode 100644 sdk/src/plugins/CodexCLIOutputPlugin.ts create mode 100644 sdk/src/plugins/CursorOutputPlugin.test.ts create mode 100644 sdk/src/plugins/CursorOutputPlugin.ts create mode 100644 sdk/src/plugins/DroidCLIOutputPlugin.ts create mode 100644 sdk/src/plugins/EditorConfigOutputPlugin.ts create mode 100644 sdk/src/plugins/GeminiCLIOutputPlugin.ts create mode 100644 sdk/src/plugins/GenericSkillsOutputPlugin.test.ts create mode 100644 sdk/src/plugins/GenericSkillsOutputPlugin.ts create mode 100644 sdk/src/plugins/GitExcludeOutputPlugin.ts create mode 100644 sdk/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts create mode 100644 sdk/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts create mode 100644 sdk/src/plugins/OpencodeCLIOutputPlugin.test.ts create mode 100644 sdk/src/plugins/OpencodeCLIOutputPlugin.ts create mode 100644 sdk/src/plugins/PromptMarkdownCleanup.test.ts create mode 100644 sdk/src/plugins/QoderIDEPluginOutputPlugin.test.ts create mode 100644 sdk/src/plugins/QoderIDEPluginOutputPlugin.ts create mode 100644 sdk/src/plugins/ReadmeMdConfigFileOutputPlugin.ts create mode 100644 sdk/src/plugins/TraeCNIDEOutputPlugin.ts create mode 100644 sdk/src/plugins/TraeIDEOutputPlugin.test.ts create mode 100644 sdk/src/plugins/TraeIDEOutputPlugin.ts create mode 100644 sdk/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts create mode 100644 sdk/src/plugins/WarpIDEOutputPlugin.test.ts create mode 100644 sdk/src/plugins/WarpIDEOutputPlugin.ts create mode 100644 sdk/src/plugins/WindsurfOutputPlugin.test.ts create mode 100644 sdk/src/plugins/WindsurfOutputPlugin.ts create mode 100644 sdk/src/plugins/WslMirrorDeclarations.test.ts create mode 100644 sdk/src/plugins/ZedIDEConfigOutputPlugin.ts create mode 100644 sdk/src/plugins/desk-paths.test.ts create mode 100644 sdk/src/plugins/desk-paths.ts create mode 100644 sdk/src/plugins/ide-config-output.test.ts create mode 100644 sdk/src/plugins/plugin-agentskills-compact.ts create mode 100644 sdk/src/plugins/plugin-agentsmd.ts create mode 100644 sdk/src/plugins/plugin-claude-code-cli.ts create mode 100644 sdk/src/plugins/plugin-core.ts create mode 100644 sdk/src/plugins/plugin-core/AbstractOutputPlugin.frontmatter.test.ts create mode 100644 sdk/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts create mode 100644 sdk/src/plugins/plugin-core/AbstractOutputPlugin.ts create mode 100644 sdk/src/plugins/plugin-core/AbstractPlugin.ts create mode 100644 sdk/src/plugins/plugin-core/AindexConfigDefaults.ts create mode 100644 sdk/src/plugins/plugin-core/AindexTypes.ts create mode 100644 sdk/src/plugins/plugin-core/ConfigTypes.schema.ts create mode 100644 sdk/src/plugins/plugin-core/DistPromptGuards.test.ts create mode 100644 sdk/src/plugins/plugin-core/DistPromptGuards.ts create mode 100644 sdk/src/plugins/plugin-core/ExportMetadataTypes.ts create mode 100644 sdk/src/plugins/plugin-core/GlobalScopeCollector.ts create mode 100644 sdk/src/plugins/plugin-core/InputTypes.ts create mode 100644 sdk/src/plugins/plugin-core/LocalizedPromptReader.ts create mode 100644 sdk/src/plugins/plugin-core/McpConfigManager.ts create mode 100644 sdk/src/plugins/plugin-core/OutputTypes.ts create mode 100644 sdk/src/plugins/plugin-core/PromptArtifactCache.test.ts create mode 100644 sdk/src/plugins/plugin-core/PromptArtifactCache.ts create mode 100644 sdk/src/plugins/plugin-core/PromptCompilerDiagnostics.test.ts create mode 100644 sdk/src/plugins/plugin-core/PromptCompilerDiagnostics.ts create mode 100644 sdk/src/plugins/plugin-core/PromptIdentity.ts create mode 100644 sdk/src/plugins/plugin-core/PromptTypes.ts create mode 100644 sdk/src/plugins/plugin-core/RegistryWriter.ts create mode 100644 sdk/src/plugins/plugin-core/constants.ts create mode 100644 sdk/src/plugins/plugin-core/enums.ts create mode 100644 sdk/src/plugins/plugin-core/filters.ts create mode 100644 sdk/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts create mode 100644 sdk/src/plugins/plugin-core/plugin.ts create mode 100644 sdk/src/plugins/plugin-core/scopePolicy.test.ts create mode 100644 sdk/src/plugins/plugin-core/scopePolicy.ts create mode 100644 sdk/src/plugins/plugin-core/types.ts create mode 100644 sdk/src/plugins/plugin-cursor.ts create mode 100644 sdk/src/plugins/plugin-droid-cli.ts create mode 100644 sdk/src/plugins/plugin-editorconfig.ts create mode 100644 sdk/src/plugins/plugin-gemini-cli.ts create mode 100644 sdk/src/plugins/plugin-git-exclude.ts create mode 100644 sdk/src/plugins/plugin-jetbrains-ai-codex.ts create mode 100644 sdk/src/plugins/plugin-jetbrains-codestyle.ts create mode 100644 sdk/src/plugins/plugin-openai-codex-cli.ts create mode 100644 sdk/src/plugins/plugin-opencode-cli.ts create mode 100644 sdk/src/plugins/plugin-qoder-ide.ts create mode 100644 sdk/src/plugins/plugin-readme.ts create mode 100644 sdk/src/plugins/plugin-trae-cn-ide.ts create mode 100644 sdk/src/plugins/plugin-trae-ide.ts create mode 100644 sdk/src/plugins/plugin-vscode.ts create mode 100644 sdk/src/plugins/plugin-warp-ide.ts create mode 100644 sdk/src/plugins/plugin-windsurf.ts create mode 100644 sdk/src/plugins/plugin-zed.ts create mode 100644 sdk/src/prompts.test.ts create mode 100644 sdk/src/prompts.ts create mode 100644 sdk/src/public-config-paths.ts create mode 100644 sdk/src/runtime-environment.test.ts create mode 100644 sdk/src/runtime-environment.ts create mode 100644 sdk/src/schema.ts create mode 100644 sdk/src/script-runtime-worker.ts create mode 100644 sdk/src/wsl-mirror-sync.test.ts create mode 100644 sdk/src/wsl-mirror-sync.ts diff --git a/sdk/src/Aindex.ts b/sdk/src/Aindex.ts new file mode 100644 index 00000000..51c66948 --- /dev/null +++ b/sdk/src/Aindex.ts @@ -0,0 +1,161 @@ +/** + * Aindex validation and generation utilities + * 使用扁平的 bundles 结构直接遍历创建项目目录和文件 + */ +import type {AindexConfig, ILogger} from './plugins/plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' + +/** + * Version control check result + */ +export interface VersionControlCheckResult { + readonly hasGit: boolean + readonly gitPath: string +} + +/** + * Check if the aindex has version control (.git directory) + * Logs info if .git exists, warns if not + * + * @param rootPath - Root path of the aindex + * @param logger - Optional logger instance + * @returns Version control check result + */ +export function checkVersionControl( + rootPath: string, + logger?: ILogger +): VersionControlCheckResult { + const gitPath = path.join(rootPath, '.git') + const hasGit = fs.existsSync(gitPath) + + if (hasGit) logger?.info('version control detected', {path: gitPath}) + else { + logger?.warn(buildUsageDiagnostic({ + code: 'AINDEX_VERSION_CONTROL_MISSING', + title: 'Aindex root is not under version control', + rootCause: diagnosticLines(`tnmsc did not find a .git directory under "${rootPath}".`), + exactFix: diagnosticLines( + `Initialize git in "${rootPath}" or place the aindex inside an existing git repository.` + ), + possibleFixes: [ + diagnosticLines('Run `git init` in the aindex root if the directory should be versioned.') + ], + details: { + rootPath, + gitPath + } + })) + } + + return {hasGit, gitPath} +} + +/** + * Generation result + */ +export interface GenerationResult { + readonly success: boolean + readonly rootPath: string + readonly createdDirs: readonly string[] + readonly createdFiles: readonly string[] + readonly existedDirs: readonly string[] + readonly existedFiles: readonly string[] +} + +/** + * Generation options + */ +export interface GenerationOptions { + /** Logger instance */ + readonly logger?: ILogger + /** Aindex structure from user config */ + readonly config?: Required +} + +const DEFAULT_FILE_CONTENT = '# Generated by tnmsc init\n' + +function isFilePath(relativePath: string): boolean { + return path.extname(relativePath).length > 0 +} + +/** + * Generate aindex directory structure + */ +export function generateAindex( + rootPath: string, + options: GenerationOptions = {} +): GenerationResult { + const {logger, config} = options + const createdDirs: string[] = [] + const createdFiles: string[] = [] + const existedDirs: string[] = [] + const existedFiles: string[] = [] + const createdDirsSet = new Set() + const existedDirsSet = new Set() + const existedFilesSet = new Set() + + const ensureDirectory = (dirPath: string): void => { + if (fs.existsSync(dirPath)) { + if (!existedDirsSet.has(dirPath)) { + existedDirsSet.add(dirPath) + existedDirs.push(dirPath) + logger?.debug('directory exists', {path: dirPath}) + } + return + } + + fs.mkdirSync(dirPath, {recursive: true}) + + let currentDir = dirPath + while (!createdDirsSet.has(currentDir)) { + createdDirsSet.add(currentDir) + createdDirs.push(currentDir) + logger?.info('created directory', {path: currentDir}) + + if (currentDir === rootPath) break + currentDir = path.dirname(currentDir) + } + } + + const ensureFile = (filePath: string, content: string = DEFAULT_FILE_CONTENT): void => { + ensureDirectory(path.dirname(filePath)) + + if (fs.existsSync(filePath)) { + if (!existedFilesSet.has(filePath)) { + existedFilesSet.add(filePath) + existedFiles.push(filePath) + logger?.debug('file exists', {path: filePath}) + } + return + } + + fs.writeFileSync(filePath, content, 'utf8') + createdFiles.push(filePath) + logger?.info('created file', {path: filePath}) + } + + ensureDirectory(rootPath) + + if (config != null) { + for (const [key, moduleConfig] of Object.entries(config)) { + if (key === 'dir' || typeof moduleConfig !== 'object' || moduleConfig == null) continue + + for (const relativePath of [moduleConfig.src, moduleConfig.dist]) { + const targetPath = path.join(rootPath, relativePath) + if (isFilePath(relativePath)) ensureFile(targetPath) + else ensureDirectory(targetPath) + } + } + } + + return { + success: true, + rootPath, + createdDirs, + createdFiles, + existedDirs, + existedFiles + } +} diff --git a/sdk/src/ConfigLoader.test.ts b/sdk/src/ConfigLoader.test.ts new file mode 100644 index 00000000..7a72fc12 --- /dev/null +++ b/sdk/src/ConfigLoader.test.ts @@ -0,0 +1,67 @@ +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {afterEach, describe, expect, it} from 'vitest' +import {ConfigLoader, getGlobalConfigPath} from './ConfigLoader' + +describe('configLoader', () => { + const originalHome = process.env.HOME + const originalUserProfile = process.env.USERPROFILE + const originalHomeDrive = process.env.HOMEDRIVE + const originalHomePath = process.env.HOMEPATH + + afterEach(() => { + process.env.HOME = originalHome + process.env.USERPROFILE = originalUserProfile + process.env.HOMEDRIVE = originalHomeDrive + process.env.HOMEPATH = originalHomePath + }) + + it('searches only the canonical global config path', () => { + const tempHome = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-home-')) + process.env.HOME = tempHome + process.env.USERPROFILE = tempHome + delete process.env.HOMEDRIVE + delete process.env.HOMEPATH + + try { + const loader = new ConfigLoader() + expect(loader.getSearchPaths(path.join(tempHome, 'workspace'))).toEqual([getGlobalConfigPath()]) + } + finally { + fs.rmSync(tempHome, {recursive: true, force: true}) + } + }) + + it('defaults aindex.softwares when loading an older config file', () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-config-loader-')) + const configPath = path.join(tempDir, '.tnmsc.json') + + try { + fs.writeFileSync(configPath, JSON.stringify({ + workspaceDir: '/tmp/workspace', + aindex: { + dir: 'aindex', + skills: {src: 'skills', dist: 'dist/skills'}, + commands: {src: 'commands', dist: 'dist/commands'}, + subAgents: {src: 'subagents', dist: 'dist/subagents'}, + rules: {src: 'rules', dist: 'dist/rules'}, + globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, + workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, + app: {src: 'app', dist: 'dist/app'}, + ext: {src: 'ext', dist: 'dist/ext'}, + arch: {src: 'arch', dist: 'dist/arch'} + } + }), 'utf8') + + const loader = new ConfigLoader() + const result = loader.loadFromFile(configPath) + + expect(result.found).toBe(true) + expect(result.config.aindex?.softwares).toEqual({src: 'softwares', dist: 'dist/softwares'}) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/ConfigLoader.ts b/sdk/src/ConfigLoader.ts new file mode 100644 index 00000000..90e15cff --- /dev/null +++ b/sdk/src/ConfigLoader.ts @@ -0,0 +1,473 @@ +import type {ILogger} from '@truenine/logger' +import type { + AindexConfig, + CleanupProtectionOptions, + ConfigLoaderOptions, + ConfigLoadResult, + FrontMatterOptions, + OutputScopeOptions, + PluginOutputScopeTopics, + UserConfigFile, + WindowsOptions +} from './plugins/plugin-core' +import * as fs from 'node:fs' +import process from 'node:process' +import {createLogger} from '@truenine/logger' +import { + buildConfigDiagnostic, + buildFileOperationDiagnostic, + diagnosticLines, + splitDiagnosticText +} from './diagnostics' +import {mergeAindexConfig, ZUserConfigFile} from './plugins/plugin-core' +import { + getRequiredGlobalConfigPath, + resolveRuntimeEnvironment, + resolveUserPath, + DEFAULT_GLOBAL_CONFIG_FILE_NAME as RUNTIME_DEFAULT_CONFIG_FILE_NAME, + DEFAULT_GLOBAL_CONFIG_DIR as RUNTIME_DEFAULT_GLOBAL_CONFIG_DIR +} from './runtime-environment' + +/** + * Default config file name + */ +export const DEFAULT_CONFIG_FILE_NAME = '.tnmsc.json' + +/** + * Default global config directory (relative to home) + */ +export const DEFAULT_GLOBAL_CONFIG_DIR = '.aindex' + +/** + * Get global config file path + */ +export function getGlobalConfigPath(): string { + return getRequiredGlobalConfigPath() +} + +/** + * Validation result for global config + */ +export interface GlobalConfigValidationResult { + readonly valid: boolean + + readonly exists: boolean + + readonly errors: readonly string[] + + readonly shouldExit: boolean +} + +/** + * ConfigLoader handles discovery and loading of user configuration files. + * + * The config source is fixed and unambiguous: + * 1. Global: ~/.aindex/.tnmsc.json + */ +export class ConfigLoader { + private readonly logger: ILogger + + constructor(options: ConfigLoaderOptions = {}) { + void options + this.logger = createLogger('ConfigLoader') + } + + getSearchPaths(cwd: string = process.cwd()): string[] { + void cwd + const runtimeEnvironment = resolveRuntimeEnvironment() + + if (!runtimeEnvironment.isWsl) return [getRequiredGlobalConfigPath()] + + this.logger.info('wsl environment detected', { + effectiveHomeDir: runtimeEnvironment.effectiveHomeDir + }) + if (runtimeEnvironment.selectedGlobalConfigPath == null) { + throw new Error( + `WSL host config file not found under "${runtimeEnvironment.windowsUsersRoot}/*/${DEFAULT_GLOBAL_CONFIG_DIR}/${DEFAULT_CONFIG_FILE_NAME}".` + ) + } + this.logger.info('using wsl host global config', { + path: runtimeEnvironment.selectedGlobalConfigPath + }) + return [getRequiredGlobalConfigPath()] + } + + loadFromFile(filePath: string): ConfigLoadResult { + const resolvedPath = this.resolveTilde(filePath) + + try { + if (!fs.existsSync(resolvedPath)) return {config: {}, source: null, found: false} + + const content = fs.readFileSync(resolvedPath, 'utf8') + const config = this.parseConfig(content, resolvedPath) + + this.logger.debug('loaded', {source: resolvedPath}) + return {config, source: resolvedPath, found: true} + } + catch (error) { + this.logger.warn(buildFileOperationDiagnostic({ + code: 'CONFIG_FILE_LOAD_FAILED', + title: 'Failed to load config file', + operation: 'read', + targetKind: 'config file', + path: resolvedPath, + error + })) + return {config: {}, source: null, found: false} + } + } + + load(cwd: string = process.cwd()): MergedConfigResult { + const searchPaths = this.getSearchPaths(cwd) + const loadedConfigs: ConfigLoadResult[] = [] + + for (const searchPath of searchPaths) { + const result = this.loadFromFile(searchPath) + if (result.found) loadedConfigs.push(result) + } + + const merged = this.mergeConfigs(loadedConfigs.map(r => r.config)) // Merge configs (first has highest priority) + const sources = loadedConfigs.map(r => r.source).filter((s): s is string => s !== null) + + return { + config: merged, + sources, + found: loadedConfigs.length > 0 + } + } + + private parseConfig(content: string, filePath: string): UserConfigFile { + let parsed: unknown + try { + parsed = JSON.parse(content) + } + catch (error) { + if (error instanceof SyntaxError) throw new Error(`Invalid JSON in ${filePath}: ${error.message}`) + throw error + } + + const result = ZUserConfigFile.safeParse(parsed) + if (result.success) return result.data + + const errors = result.error.issues.map((i: {path: (string | number)[], message: string}) => `${i.path.join('.')}: ${i.message}`) // Validation failed - throw error instead of returning empty config + throw new Error(`Config validation failed in ${filePath}:\n${errors.join('\n')}`) + } + + private mergeConfigs(configs: UserConfigFile[]): UserConfigFile { + if (configs.length === 0) return {} + + const firstConfig = configs[0] + if (configs.length === 1 && firstConfig != null) return firstConfig + + const reversed = [...configs].reverse() // Reverse to merge from lowest to highest priority + + return reversed.reduce((acc, config) => { + const mergedAindex = this.mergeAindex(acc.aindex, config.aindex) + const mergedOutputScopes = this.mergeOutputScopeOptions(acc.outputScopes, config.outputScopes) + const mergedFrontMatter = this.mergeFrontMatterOptions(acc.frontMatter, config.frontMatter) + const mergedCleanupProtection = this.mergeCleanupProtectionOptions( + acc.cleanupProtection, + config.cleanupProtection + ) + const mergedWindows = this.mergeWindowsOptions(acc.windows, config.windows) + + return { + ...acc, + ...config, + ...mergedAindex != null ? {aindex: mergedAindex} : {}, + ...mergedOutputScopes != null ? {outputScopes: mergedOutputScopes} : {}, + ...mergedFrontMatter != null ? {frontMatter: mergedFrontMatter} : {}, + ...mergedCleanupProtection != null ? {cleanupProtection: mergedCleanupProtection} : {}, + ...mergedWindows != null ? {windows: mergedWindows} : {} + } + }, {}) + } + + private mergeAindex( + a?: AindexConfig, + b?: AindexConfig + ): AindexConfig | undefined { + if (a == null && b == null) return void 0 + if (a == null) return b + if (b == null) return a + return mergeAindexConfig(a, b) + } + + private mergeOutputScopeTopics( + a?: PluginOutputScopeTopics, + b?: PluginOutputScopeTopics + ): PluginOutputScopeTopics | undefined { + if (a == null && b == null) return void 0 + if (a == null) return b + if (b == null) return a + return {...a, ...b} + } + + private mergeOutputScopeOptions( + a?: OutputScopeOptions, + b?: OutputScopeOptions + ): OutputScopeOptions | undefined { + if (a == null && b == null) return void 0 + if (a == null) return b + if (b == null) return a + + const mergedPlugins: Record = {} + for (const [pluginName, topics] of Object.entries(a.plugins ?? {})) { + if (topics != null) mergedPlugins[pluginName] = {...topics} + } + for (const [pluginName, topics] of Object.entries(b.plugins ?? {})) { + const mergedTopics = this.mergeOutputScopeTopics(mergedPlugins[pluginName], topics) + if (mergedTopics != null) mergedPlugins[pluginName] = mergedTopics + } + + if (Object.keys(mergedPlugins).length === 0) return {} + return {plugins: mergedPlugins} + } + + private mergeFrontMatterOptions( + a?: FrontMatterOptions, + b?: FrontMatterOptions + ): FrontMatterOptions | undefined { + if (a == null && b == null) return void 0 + if (a == null) return b + if (b == null) return a + return {...a, ...b} + } + + private mergeCleanupProtectionOptions( + a?: CleanupProtectionOptions, + b?: CleanupProtectionOptions + ): CleanupProtectionOptions | undefined { + if (a == null && b == null) return void 0 + if (a == null) return b + if (b == null) return a + + return { + rules: [ + ...a.rules ?? [], + ...b.rules ?? [] + ] + } + } + + private mergeWindowsOptions( + a?: WindowsOptions, + b?: WindowsOptions + ): WindowsOptions | undefined { + if (a == null && b == null) return void 0 + if (a == null) return b + if (b == null) return a + + return { + ...a, + ...b, + ...a.wsl2 != null || b.wsl2 != null + ? { + wsl2: { + ...a.wsl2, + ...b.wsl2 + } + } + : {} + } + } + + private resolveTilde(p: string): string { + return p.startsWith('~') ? resolveUserPath(p) : p + } +} + +/** + * Result of loading and merging all configurations + */ +export interface MergedConfigResult { + readonly config: UserConfigFile + + readonly sources: readonly string[] + + readonly found: boolean +} + +/** + * Singleton instance for convenience + */ +let defaultLoader: ConfigLoader | null = null + +/** + * Get or create the default ConfigLoader instance + */ +export function getConfigLoader(options?: ConfigLoaderOptions): ConfigLoader { + if (options || !defaultLoader) defaultLoader = new ConfigLoader(options) + return defaultLoader +} + +/** + * Load user configuration using default loader + */ +export function loadUserConfig(cwd?: string): MergedConfigResult { + return getConfigLoader().load(cwd) +} + +/** + * Validate global config file strictly. + * - If config doesn't exist: return invalid result (do not auto-create) + * - If config is invalid (parse error or validation error): return invalid result (do not recreate) + * + * @returns Validation result indicating whether program should continue or exit + */ +export function validateGlobalConfig(): GlobalConfigValidationResult { + const logger = createLogger('ConfigLoader') + let configPath: string + + try { + configPath = getRequiredGlobalConfigPath() + } + catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + logger.error(buildConfigDiagnostic({ + code: 'GLOBAL_CONFIG_PATH_RESOLUTION_FAILED', + title: 'Failed to resolve global config path', + reason: diagnosticLines(errorMessage), + configPath: `${RUNTIME_DEFAULT_GLOBAL_CONFIG_DIR}/${RUNTIME_DEFAULT_CONFIG_FILE_NAME}`, + exactFix: diagnosticLines( + 'Ensure the required global config exists in the expected runtime-specific location before running tnmsc again.' + ) + })) + return { + valid: false, + exists: false, + errors: [errorMessage], + shouldExit: true + } + } + + if (!fs.existsSync(configPath)) { // Check if config file exists - do not auto-create + const error = `Global config not found at ${configPath}. Please create it manually.` + logger.error(buildConfigDiagnostic({ + code: 'GLOBAL_CONFIG_MISSING', + title: 'Global config file is missing', + reason: diagnosticLines( + `tnmsc could not find the required global config file at "${configPath}".` + ), + configPath, + exactFix: diagnosticLines( + 'Create the global config file manually before running tnmsc again.' + ), + possibleFixes: [ + diagnosticLines('Initialize the file with a valid JSON object, for example `{}`.') + ] + })) + return { + valid: false, + exists: false, + errors: [error], + shouldExit: true + } + } + + let content: string + try { + content = fs.readFileSync(configPath, 'utf8') + } + catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + logger.error(buildFileOperationDiagnostic({ + code: 'GLOBAL_CONFIG_READ_FAILED', + title: 'Failed to read global config file', + operation: 'read', + targetKind: 'global config file', + path: configPath, + error: errorMessage + })) + return { + valid: false, + exists: true, + errors: [`Failed to read config: ${errorMessage}`], + shouldExit: true + } + } + + let parsed: unknown + try { + parsed = JSON.parse(content) + } + catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + logger.error(buildConfigDiagnostic({ + code: 'GLOBAL_CONFIG_JSON_INVALID', + title: 'Global config contains invalid JSON', + reason: diagnosticLines( + `tnmsc could not parse the JSON in "${configPath}".`, + `Parser error: ${errorMessage}` + ), + configPath, + exactFix: diagnosticLines( + 'Fix the JSON syntax in the global config file so it parses as a single JSON object.' + ), + possibleFixes: [ + diagnosticLines('Validate the file with a JSON parser and remove trailing commas or invalid tokens.') + ] + })) + return { + valid: false, + exists: true, + errors: [`Invalid JSON: ${errorMessage}`], + shouldExit: true + } + } + + if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) { + logger.error(buildConfigDiagnostic({ + code: 'GLOBAL_CONFIG_NOT_OBJECT', + title: 'Global config must be a JSON object', + reason: diagnosticLines( + `tnmsc parsed "${configPath}" successfully, but the top-level value is not a JSON object.` + ), + configPath, + exactFix: diagnosticLines( + 'Replace the top-level JSON value with an object like `{}` or a valid config object.' + ) + })) + return { + valid: false, + exists: true, + errors: ['Config must be a JSON object'], + shouldExit: true + } + } + + const zodResult = ZUserConfigFile.safeParse(parsed) + if (!zodResult.success) { + const errors = zodResult.error.issues.map((i: {path: (string | number)[], message: string}) => `${i.path.join('.')}: ${i.message}`) + for (const err of errors) { + logger.error(buildConfigDiagnostic({ + code: 'GLOBAL_CONFIG_VALIDATION_FAILED', + title: 'Global config validation failed', + reason: splitDiagnosticText(err), + configPath, + exactFix: diagnosticLines( + 'Update the invalid config field so it matches the tnmsc schema.' + ), + possibleFixes: [ + diagnosticLines('Compare the field name and value against the current config schema or examples.') + ], + details: { + validationError: err + } + })) + } + return { + valid: false, + exists: true, + errors, + shouldExit: true + } + } + + return { + valid: true, + exists: true, + errors: [], + shouldExit: false + } +} diff --git a/sdk/src/PluginPipeline.test.ts b/sdk/src/PluginPipeline.test.ts new file mode 100644 index 00000000..27d12a4f --- /dev/null +++ b/sdk/src/PluginPipeline.test.ts @@ -0,0 +1,60 @@ +import type {PipelineConfig} from './config' +import type {OutputPlugin} from './plugins/plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from './config' +import {PluginPipeline} from './PluginPipeline' +import {createLogger, FilePathKind, PluginKind} from './plugins/plugin-core' + +describe('plugin pipeline output contexts', () => { + it('passes user config options into write contexts', async () => { + const tempDir = path.resolve('tmp/plugin-pipeline-frontmatter') + fs.rmSync(tempDir, {recursive: true, force: true}) + fs.mkdirSync(tempDir, {recursive: true}) + + const outputPath = path.join(tempDir, 'frontmatter.txt') + let seenBlankLineAfter: boolean | undefined + + const plugin: OutputPlugin = { + type: PluginKind.Output, + name: 'CaptureOutputPlugin', + log: createLogger('CaptureOutputPlugin', 'error'), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles(ctx) { + seenBlankLineAfter = ctx.pluginOptions?.frontMatter?.blankLineAfter + return [{path: outputPath, source: 'capture'}] + }, + async convertContent(_declaration, ctx) { + return String(ctx.pluginOptions?.frontMatter?.blankLineAfter) + } + } + + const config: PipelineConfig = { + context: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: tempDir, + getDirectoryName: () => path.basename(tempDir) + }, + projects: [] + } + }, + outputPlugins: [plugin], + userConfigOptions: mergeConfig({ + workspaceDir: tempDir, + frontMatter: { + blankLineAfter: false + } + }) + } + + const result = await new PluginPipeline('node', 'tnmsc').run(config) + + expect(result.success).toBe(true) + expect(seenBlankLineAfter).toBe(false) + expect(fs.readFileSync(outputPath, 'utf8')).toBe('false') + }) +}) diff --git a/sdk/src/PluginPipeline.ts b/sdk/src/PluginPipeline.ts new file mode 100644 index 00000000..652952ba --- /dev/null +++ b/sdk/src/PluginPipeline.ts @@ -0,0 +1,101 @@ +import type {ILogger, OutputCleanContext, OutputCollectedContext, OutputPlugin, OutputRuntimeTargets, OutputWriteContext, PluginOptions} from './plugins/plugin-core' +import type {Command, CommandContext, CommandResult} from '@/commands/Command' +import type {PipelineConfig} from '@/config' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {JsonOutputCommand} from '@/commands/JsonOutputCommand' +import {extractUserArgs, parseArgs, resolveCommand} from '@/pipeline/CliArgumentParser' +import {discoverOutputRuntimeTargets} from '@/pipeline/OutputRuntimeTargets' +import {createLogger, setGlobalLogLevel} from './plugins/plugin-core' + +/** + * Plugin Pipeline - Orchestrates plugin execution + * + * This class has been refactored to use modular components: + * - CliArgumentParser: CLI argument parsing (moved to @/pipeline) + * - DependencyResolver: dependency ordering (moved to @/pipeline) + * - ContextMerger: Context merging (moved to @/pipeline) + */ +export class PluginPipeline { + private readonly logger: ILogger + readonly args: ParsedCliArgs + private outputPlugins: OutputPlugin[] = [] + private runtimeTargets?: OutputRuntimeTargets + + constructor(...cmdArgs: (string | undefined)[]) { + const filtered = cmdArgs.filter((arg): arg is string => arg != null) + const userArgs = extractUserArgs(filtered) + this.args = parseArgs(userArgs) + + const resolvedLogLevel = this.args.logLevel // Resolve log level from parsed args and set globally + if (resolvedLogLevel != null) setGlobalLogLevel(resolvedLogLevel) + this.logger = createLogger('PluginPipeline', resolvedLogLevel) + this.logger.debug('initialized', {args: this.args}) + } + + registerOutputPlugins(plugins: OutputPlugin[]): this { + this.outputPlugins.push(...plugins) + return this + } + + async run(config: PipelineConfig): Promise { + const {context, outputPlugins, userConfigOptions} = config + this.registerOutputPlugins([...outputPlugins]) + + let command: Command = resolveCommand(this.args) + + if (this.args.jsonFlag) { + setGlobalLogLevel('silent') // Suppress all console logging in JSON mode + + const selfJsonCommands = new Set(['config-show', 'plugins']) // only need log suppression, not JsonOutputCommand wrapping // Commands that handle their own JSON output (config --show, plugins) + if (!selfJsonCommands.has(command.name)) command = new JsonOutputCommand(command) + } + + const commandCtx = this.createCommandContext(context, userConfigOptions) + return command.execute(commandCtx) + } + + private createCommandContext(ctx: OutputCollectedContext, userConfigOptions: Required): CommandContext { + return { + logger: this.logger, + outputPlugins: this.outputPlugins, + collectedOutputContext: ctx, + userConfigOptions, + createCleanContext: (dryRun: boolean) => this.createCleanContext(ctx, userConfigOptions, dryRun), + createWriteContext: (dryRun: boolean) => this.createWriteContext(ctx, userConfigOptions, dryRun) + } + } + + private createCleanContext( + ctx: OutputCollectedContext, + userConfigOptions: Required, + dryRun: boolean + ): OutputCleanContext { + return { + logger: this.logger, + collectedOutputContext: ctx, + pluginOptions: userConfigOptions, + runtimeTargets: this.getRuntimeTargets(), + dryRun + } + } + + private createWriteContext( + ctx: OutputCollectedContext, + userConfigOptions: Required, + dryRun: boolean + ): OutputWriteContext { + return { + logger: this.logger, + collectedOutputContext: ctx, + pluginOptions: userConfigOptions, + runtimeTargets: this.getRuntimeTargets(), + dryRun, + registeredPluginNames: this.outputPlugins.map(p => p.name) + } + } + + private getRuntimeTargets(): OutputRuntimeTargets { + this.runtimeTargets ??= discoverOutputRuntimeTargets(this.logger) + return this.runtimeTargets + } +} diff --git a/sdk/src/ProtectedDeletionGuard.ts b/sdk/src/ProtectedDeletionGuard.ts new file mode 100644 index 00000000..f0644679 --- /dev/null +++ b/sdk/src/ProtectedDeletionGuard.ts @@ -0,0 +1,612 @@ +import type {ILogger} from '@truenine/logger' +import type {OutputCollectedContext, PluginOptions} from './plugins/plugin-core' +import type {PublicDefinitionResolveOptions} from './public-config-paths' +import * as fs from 'node:fs' +import * as path from 'node:path' +import process from 'node:process' +import glob from 'fast-glob' +import {buildProtectedDeletionDiagnostic} from './diagnostics' +import { + AINDEX_CONFIG_DIRECTORY_PAIR_KEYS, + AINDEX_PROJECT_SERIES_NAMES +} from './plugins/plugin-core' +import {collectKnownPublicConfigDefinitionPaths} from './public-config-paths' +import {getEffectiveHomeDir, resolveUserPath} from './runtime-environment' + +interface DirPathLike { + readonly path: string + readonly pathKind?: string + readonly basePath?: string + readonly getAbsolutePath?: () => string +} + +export type ProtectionMode = 'direct' | 'recursive' +export type ProtectionRuleMatcher = 'path' | 'glob' + +export interface ProtectedPathRule { + readonly path: string + readonly protectionMode: ProtectionMode + readonly reason: string + readonly source: string + readonly matcher?: ProtectionRuleMatcher +} + +interface CompiledProtectedPathRule extends ProtectedPathRule { + readonly comparisonKeys: readonly string[] + readonly normalizedPath: string + readonly specificity: number +} + +export interface ProtectedPathViolation { + readonly targetPath: string + readonly protectedPath: string + readonly protectionMode: ProtectionMode + readonly reason: string + readonly source: string +} + +export interface ProtectedDeletionGuard { + readonly rules: readonly ProtectedPathRule[] + readonly exactProtectedPaths: readonly string[] + readonly subtreeProtectedPaths: readonly string[] + readonly compiledRules: readonly CompiledProtectedPathRule[] +} + +export interface ProtectedDeletionGuardOptions { + readonly workspaceDir?: string + readonly aindexDir?: string + readonly projectRoots?: readonly string[] + readonly exactProtectedPaths?: readonly string[] + readonly subtreeProtectedPaths?: readonly string[] + readonly rules?: readonly ProtectedPathRule[] + readonly includeReservedWorkspaceContentRoots?: boolean +} + +export class ProtectedDeletionGuardError extends Error { + readonly operation: string + + readonly violations: readonly ProtectedPathViolation[] + + constructor(operation: string, violations: readonly ProtectedPathViolation[]) { + super(buildProtectedDeletionGuardMessage(operation, violations)) + this.name = 'ProtectedDeletionGuardError' + this.operation = operation + this.violations = violations + } +} + +const CONFIGURED_AINDEX_FILE_KEYS = [ + 'globalPrompt', + 'workspacePrompt' +] as const satisfies readonly (keyof Required['aindex'])[] + +function resolveXdgConfigHome(homeDir: string): string { + const xdgConfigHome = process.env['XDG_CONFIG_HOME'] + if (typeof xdgConfigHome === 'string' && xdgConfigHome.trim().length > 0) return xdgConfigHome + return path.join(homeDir, '.config') +} + +function resolveXdgDataHome(homeDir: string): string { + const xdgDataHome = process.env['XDG_DATA_HOME'] + if (typeof xdgDataHome === 'string' && xdgDataHome.trim().length > 0) return xdgDataHome + return path.join(homeDir, '.local', 'share') +} + +function resolveXdgStateHome(homeDir: string): string { + const xdgStateHome = process.env['XDG_STATE_HOME'] + if (typeof xdgStateHome === 'string' && xdgStateHome.trim().length > 0) return xdgStateHome + return path.join(homeDir, '.local', 'state') +} + +function resolveXdgCacheHome(homeDir: string): string { + const xdgCacheHome = process.env['XDG_CACHE_HOME'] + if (typeof xdgCacheHome === 'string' && xdgCacheHome.trim().length > 0) return xdgCacheHome + return path.join(homeDir, '.cache') +} + +function resolveAbsolutePathFromDir(dir: DirPathLike | undefined): string | undefined { + if (dir == null) return void 0 + + if (typeof dir.getAbsolutePath === 'function') { + try { + const absolute = dir.getAbsolutePath() + if (absolute.length > 0) return path.resolve(absolute) + } + catch {} + } + + if (dir.pathKind === 'absolute') return path.resolve(dir.path) + if (typeof dir.basePath === 'string' && dir.basePath.length > 0) return path.resolve(dir.basePath, dir.path) + return void 0 +} + +export function expandHomePath(rawPath: string): string { + if (rawPath === '~' || rawPath.startsWith('~/') || rawPath.startsWith('~\\')) return resolveUserPath(rawPath) + return rawPath +} + +export function resolveAbsolutePath(rawPath: string): string { + return path.resolve(expandHomePath(rawPath)) +} + +function normalizeForComparison(rawPath: string): string { + const normalized = path.normalize(resolveAbsolutePath(rawPath)) + if (process.platform === 'win32') return normalized.toLowerCase() + return normalized +} + +function stripTrailingSeparator(rawPath: string): string { + const {root} = path.parse(rawPath) + if (rawPath === root) return rawPath + return rawPath.endsWith(path.sep) ? rawPath.slice(0, -1) : rawPath +} + +function isSameOrChildPath(candidate: string, parent: string): boolean { + const normalizedCandidate = stripTrailingSeparator(candidate) + const normalizedParent = stripTrailingSeparator(parent) + if (normalizedCandidate === normalizedParent) return true + return normalizedCandidate.startsWith(`${normalizedParent}${path.sep}`) +} + +export function buildComparisonKeys(rawPath: string): readonly string[] { + const absolute = resolveAbsolutePath(rawPath) + const keys = new Set([normalizeForComparison(absolute)]) + + try { + if (fs.existsSync(absolute)) { + const realPath = fs.realpathSync.native(absolute) + keys.add(normalizeForComparison(realPath)) + } + } + catch {} + + return [...keys] +} + +function createProtectedPathRule( + rawPath: string, + protectionMode: ProtectionMode, + reason: string, + source: string, + matcher: ProtectionRuleMatcher = 'path' +): ProtectedPathRule { + return { + path: resolveAbsolutePath(rawPath), + protectionMode, + reason, + source, + matcher + } +} + +function compileRule(rule: ProtectedPathRule): CompiledProtectedPathRule { + const normalizedPath = normalizeForComparison(rule.path) + return { + ...rule, + path: resolveAbsolutePath(rule.path), + comparisonKeys: buildComparisonKeys(rule.path), + normalizedPath, + specificity: stripTrailingSeparator(normalizedPath).length + } +} + +function dedupeAndCompileRules(rules: readonly ProtectedPathRule[]): CompiledProtectedPathRule[] { + const compiledByKey = new Map() + + for (const rule of rules) { + const compiled = compileRule(rule) + compiledByKey.set(`${compiled.protectionMode}:${compiled.normalizedPath}`, compiled) + } + + return [...compiledByKey.values()].sort((a, b) => { + const specificityDiff = b.specificity - a.specificity + if (specificityDiff !== 0) return specificityDiff + + if (a.protectionMode !== b.protectionMode) return a.protectionMode === 'recursive' ? -1 : 1 + return a.path.localeCompare(b.path) + }) +} + +function normalizeGlobPattern(pattern: string): string { + return resolveAbsolutePath(pattern).replaceAll('\\', '/') +} + +function expandProtectedPathRules(rules: readonly ProtectedPathRule[]): ProtectedPathRule[] { + const expandedRules: ProtectedPathRule[] = [] + + for (const rule of rules) { + if (rule.matcher !== 'glob') { + expandedRules.push(createProtectedPathRule(rule.path, rule.protectionMode, rule.reason, rule.source)) + continue + } + + const matchedPaths = glob.sync(normalizeGlobPattern(rule.path), { + onlyFiles: false, + dot: true, + absolute: true, + followSymbolicLinks: false + }) + + for (const matchedPath of matchedPaths) expandedRules.push(createProtectedPathRule(matchedPath, rule.protectionMode, rule.reason, rule.source)) + } + + return expandedRules +} + +function isRuleMatch(targetKey: string, ruleKey: string, protectionMode: ProtectionMode): boolean { + if (protectionMode === 'direct') return isSameOrChildPath(ruleKey, targetKey) + return isSameOrChildPath(targetKey, ruleKey) || isSameOrChildPath(ruleKey, targetKey) +} + +function detectPathProtectionMode(rawPath: string, fallback: ProtectionMode): ProtectionMode { + const absolutePath = resolveAbsolutePath(rawPath) + + try { + if (fs.existsSync(absolutePath) && fs.lstatSync(absolutePath).isDirectory()) return 'recursive' + } + catch {} + + return fallback +} + +function collectBuiltInDangerousPathRules(): ProtectedPathRule[] { + const homeDir = getEffectiveHomeDir() + + return [ + createProtectedPathRule(path.parse(homeDir).root, 'direct', 'built-in dangerous root path', 'built-in-dangerous-root'), + createProtectedPathRule(homeDir, 'direct', 'built-in dangerous home directory', 'built-in-dangerous-root'), + createProtectedPathRule(resolveXdgConfigHome(homeDir), 'direct', 'built-in dangerous config directory', 'built-in-dangerous-root'), + createProtectedPathRule(resolveXdgDataHome(homeDir), 'direct', 'built-in dangerous data directory', 'built-in-dangerous-root'), + createProtectedPathRule(resolveXdgStateHome(homeDir), 'direct', 'built-in dangerous state directory', 'built-in-dangerous-root'), + createProtectedPathRule(resolveXdgCacheHome(homeDir), 'direct', 'built-in dangerous cache directory', 'built-in-dangerous-root'), + createProtectedPathRule(path.join(homeDir, '.aindex'), 'direct', 'built-in global aindex directory', 'built-in-dangerous-root'), + createProtectedPathRule(path.join(homeDir, '.aindex', '.tnmsc.json'), 'direct', 'built-in global config file', 'built-in-config') + ] +} + +function collectWorkspaceReservedRules( + workspaceDir: string, + projectRoots: readonly string[], + includeReservedWorkspaceContentRoots: boolean +): ProtectedPathRule[] { + const rules: ProtectedPathRule[] = [ + createProtectedPathRule(workspaceDir, 'direct', 'workspace root', 'workspace-reserved'), + createProtectedPathRule(path.join(workspaceDir, 'aindex'), 'direct', 'reserved workspace aindex root', 'workspace-reserved'), + createProtectedPathRule(path.join(workspaceDir, 'knowladge'), 'direct', 'reserved workspace knowladge root', 'workspace-reserved') + ] + + for (const projectRoot of projectRoots) rules.push(createProtectedPathRule(projectRoot, 'direct', 'workspace project root', 'workspace-project-root')) + + if (!includeReservedWorkspaceContentRoots) return rules + + rules.push(createProtectedPathRule( + path.join(workspaceDir, 'aindex', 'dist', '**', '*.mdx'), + 'direct', + 'reserved workspace aindex dist mdx files', + 'workspace-reserved', + 'glob' + )) + for (const seriesName of AINDEX_PROJECT_SERIES_NAMES) { + rules.push(createProtectedPathRule( + path.join(workspaceDir, 'aindex', seriesName, '**', '*.mdx'), + 'direct', + `reserved workspace aindex ${seriesName} mdx files`, + 'workspace-reserved', + 'glob' + )) + } + return rules +} + +function collectResolvedAindexRules(aindexDir: string): ProtectedPathRule[] { + return [createProtectedPathRule(aindexDir, 'direct', 'resolved aindex root', 'aindex-root')] +} + +export function collectKnownAindexInputConfigPaths( + aindexDir: string, + resolveOptions?: PublicDefinitionResolveOptions +): string[] { + return collectKnownPublicConfigDefinitionPaths(aindexDir, resolveOptions) +} + +export function collectConfiguredAindexInputRules( + pluginOptions: Required, + aindexDir: string, + resolveOptions?: PublicDefinitionResolveOptions +): ProtectedPathRule[] { + const rules: ProtectedPathRule[] = [] + + for (const key of AINDEX_CONFIG_DIRECTORY_PAIR_KEYS) { + const configuredDir = pluginOptions.aindex[key] + if (configuredDir == null) continue + + rules.push( + createProtectedPathRule( + path.join(aindexDir, configuredDir.src), + 'recursive', + `configured aindex ${key} source directory`, + 'configured-aindex-source' + ) + ) + } + + for (const key of CONFIGURED_AINDEX_FILE_KEYS) { + const configuredFile = pluginOptions.aindex[key] + if (configuredFile == null) continue + + rules.push( + createProtectedPathRule( + path.join(aindexDir, configuredFile.src), + 'direct', + `configured aindex ${key} source file`, + 'configured-aindex-source' + ) + ) + } + + for (const protectedPath of collectKnownAindexInputConfigPaths(aindexDir, resolveOptions)) { + rules.push( + createProtectedPathRule( + protectedPath, + 'direct', + 'known aindex input config file', + 'known-aindex-config' + ) + ) + } + + return rules +} + +export function collectConfiguredAindexInputPaths( + pluginOptions: Required, + aindexDir: string, + resolveOptions?: PublicDefinitionResolveOptions +): string[] { + return collectConfiguredAindexInputRules(pluginOptions, aindexDir, resolveOptions).map(rule => rule.path) +} + +export function collectProtectedInputSourceRules( + collectedOutputContext: OutputCollectedContext +): ProtectedPathRule[] { + const rules: ProtectedPathRule[] = [] + const seen = new Set() + + const addRule = ( + rawPath: string | undefined, + protectionMode: ProtectionMode, + reason: string, + source: string + ): void => { + if (rawPath == null || rawPath.length === 0) return + + const rule = createProtectedPathRule(rawPath, protectionMode, reason, source) + const dedupeKey = `${rule.protectionMode}:${normalizeForComparison(rule.path)}` + if (seen.has(dedupeKey)) return + + seen.add(dedupeKey) + rules.push(rule) + } + + const addRuleFromDir = ( + dir: DirPathLike | undefined, + protectionMode: ProtectionMode, + reason: string, + source: string + ): void => { + const resolved = resolveAbsolutePathFromDir(dir) + if (resolved == null) return + addRule(resolved, protectionMode, reason, source) + } + + addRuleFromDir(collectedOutputContext.globalMemory?.dir as DirPathLike | undefined, 'recursive', 'global memory source directory', 'collected-input-source') + + for (const command of collectedOutputContext.commands ?? []) { + addRuleFromDir(command.dir as DirPathLike | undefined, 'recursive', 'command source directory', 'collected-input-source') + } + + for (const subAgent of collectedOutputContext.subAgents ?? []) { + addRuleFromDir(subAgent.dir as DirPathLike | undefined, 'recursive', 'sub-agent source directory', 'collected-input-source') + } + + for (const rule of collectedOutputContext.rules ?? []) { + addRuleFromDir(rule.dir as DirPathLike | undefined, 'recursive', 'rule source directory', 'collected-input-source') + } + + for (const skill of collectedOutputContext.skills ?? []) { + addRuleFromDir(skill.dir as DirPathLike | undefined, 'recursive', 'skill source directory', 'collected-input-source') + for (const childDoc of skill.childDocs ?? []) { + addRuleFromDir(childDoc.dir as DirPathLike | undefined, 'recursive', 'skill child document directory', 'collected-input-source') + } + for (const resource of skill.resources ?? []) { + if (resource.sourcePath == null || resource.sourcePath.length === 0) continue + addRule( + resource.sourcePath, + detectPathProtectionMode(resource.sourcePath, 'direct'), + 'skill resource source path', + 'collected-input-source' + ) + } + } + + for (const config of collectedOutputContext.vscodeConfigFiles ?? []) { + addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'vscode input config file', 'collected-input-config') + } + + for (const config of collectedOutputContext.zedConfigFiles ?? []) { + addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'zed input config file', 'collected-input-config') + } + + for (const config of collectedOutputContext.jetbrainsConfigFiles ?? []) { + addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'jetbrains input config file', 'collected-input-config') + } + + for (const config of collectedOutputContext.editorConfigFiles ?? []) { + addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'editorconfig input file', 'collected-input-config') + } + + for (const ignoreFile of collectedOutputContext.aiAgentIgnoreConfigFiles ?? []) { + addRule(ignoreFile.sourcePath, 'direct', 'AI agent ignore config file', 'collected-input-config') + } + + if (collectedOutputContext.aindexDir != null) { + for (const protectedPath of collectKnownAindexInputConfigPaths(collectedOutputContext.aindexDir, { + workspaceDir: collectedOutputContext.workspace.directory.path + })) { + addRule(protectedPath, 'direct', 'known aindex input config file', 'known-aindex-config') + } + } + + return rules +} + +export function collectProtectedInputSourcePaths(collectedOutputContext: OutputCollectedContext): string[] { + return collectProtectedInputSourceRules(collectedOutputContext).map(rule => rule.path) +} + +function collectLegacyCompatibilityRules(options: ProtectedDeletionGuardOptions): ProtectedPathRule[] { + const rules: ProtectedPathRule[] = [] + + for (const protectedPath of options.exactProtectedPaths ?? []) { + rules.push(createProtectedPathRule(protectedPath, 'direct', 'legacy direct protected path', 'legacy-direct')) + } + + for (const protectedPath of options.subtreeProtectedPaths ?? []) { + rules.push(createProtectedPathRule(protectedPath, 'recursive', 'legacy recursive protected path', 'legacy-recursive')) + } + + return rules +} + +export function createProtectedDeletionGuard( + options: ProtectedDeletionGuardOptions = {} +): ProtectedDeletionGuard { + const includeReservedWorkspaceContentRoots = options.includeReservedWorkspaceContentRoots ?? true + const rules: ProtectedPathRule[] = [ + ...collectBuiltInDangerousPathRules(), + ...collectLegacyCompatibilityRules(options), + ...options.workspaceDir != null + ? collectWorkspaceReservedRules( + options.workspaceDir, + options.projectRoots ?? [], + includeReservedWorkspaceContentRoots + ) + : [], + ...options.aindexDir != null ? collectResolvedAindexRules(options.aindexDir) : [], + ...options.rules ?? [] + ] + const compiledRules = dedupeAndCompileRules(expandProtectedPathRules(rules)) + + return { + rules: compiledRules.map(rule => ({ + path: rule.path, + protectionMode: rule.protectionMode, + reason: rule.reason, + source: rule.source, + ...rule.matcher != null ? {matcher: rule.matcher} : {} + })), + exactProtectedPaths: compiledRules + .filter(rule => rule.protectionMode === 'direct') + .map(rule => rule.path), + subtreeProtectedPaths: compiledRules + .filter(rule => rule.protectionMode === 'recursive') + .map(rule => rule.path), + compiledRules + } +} + +export function collectProjectRoots(collectedOutputContext: OutputCollectedContext): string[] { + const projectRoots = new Set() + + for (const project of collectedOutputContext.workspace.projects) { + if (project.isWorkspaceRootProject === true) continue + const absolutePath = project.dirFromWorkspacePath?.getAbsolutePath?.() + if (absolutePath != null && absolutePath.length > 0) projectRoots.add(resolveAbsolutePath(absolutePath)) + } + + return [...projectRoots] +} + +function selectMoreSpecificRule( + candidate: CompiledProtectedPathRule, + current: CompiledProtectedPathRule | undefined +): CompiledProtectedPathRule { + if (current == null) return candidate + if (candidate.specificity !== current.specificity) return candidate.specificity > current.specificity ? candidate : current + if (candidate.protectionMode !== current.protectionMode) return candidate.protectionMode === 'recursive' ? candidate : current + return candidate.path.localeCompare(current.path) < 0 ? candidate : current +} + +export function getProtectedPathViolation( + targetPath: string, + guard: ProtectedDeletionGuard +): ProtectedPathViolation | undefined { + const absoluteTargetPath = resolveAbsolutePath(targetPath) + const targetKeys = buildComparisonKeys(absoluteTargetPath) + let matchedRule: CompiledProtectedPathRule | undefined + + for (const rule of guard.compiledRules) { + let didMatch = false + + for (const targetKey of targetKeys) { + for (const ruleKey of rule.comparisonKeys) { + if (!isRuleMatch(targetKey, ruleKey, rule.protectionMode)) continue + matchedRule = selectMoreSpecificRule(rule, matchedRule) + didMatch = true + break + } + + if (didMatch) break + } + } + + if (matchedRule == null) return void 0 + + return { + targetPath: absoluteTargetPath, + protectedPath: matchedRule.path, + protectionMode: matchedRule.protectionMode, + reason: matchedRule.reason, + source: matchedRule.source + } +} + +export function partitionDeletionTargets( + targetPaths: readonly string[], + guard: ProtectedDeletionGuard +): {safePaths: string[], violations: ProtectedPathViolation[]} { + const safePaths: string[] = [] + const violationsByTargetPath = new Map() + + for (const targetPath of targetPaths) { + const absoluteTargetPath = resolveAbsolutePath(targetPath) + const violation = getProtectedPathViolation(absoluteTargetPath, guard) + if (violation == null) { + safePaths.push(absoluteTargetPath) + continue + } + + if (!violationsByTargetPath.has(violation.targetPath)) violationsByTargetPath.set(violation.targetPath, violation) + } + + return { + safePaths, + violations: [...violationsByTargetPath.values()].sort((a, b) => a.targetPath.localeCompare(b.targetPath)) + } +} + +export function buildProtectedDeletionGuardMessage( + operation: string, + violations: readonly ProtectedPathViolation[] +): string { + const pathList = violations.map(violation => violation.targetPath).join(', ') + return `Protected deletion guard blocked ${operation} for ${violations.length} path(s): ${pathList}` +} + +export function logProtectedDeletionGuardError( + logger: ILogger, + operation: string, + violations: readonly ProtectedPathViolation[] +): void { + logger.error(buildProtectedDeletionDiagnostic(operation, violations)) +} diff --git a/sdk/src/aindex-config/AindexProjectConfig.ts b/sdk/src/aindex-config/AindexProjectConfig.ts new file mode 100644 index 00000000..82ea42f5 --- /dev/null +++ b/sdk/src/aindex-config/AindexProjectConfig.ts @@ -0,0 +1,29 @@ +/** + * Configuration for empty directory cleanup in aindex projects. + */ +export interface AindexEmptyDirCleanupConfig { + /** Git-style glob patterns to exclude from empty directory cleanup. */ + readonly exclude?: readonly string[] +} + +/** + * Project-level configuration for aindex. + * This is loaded from aindex/aindex.config.ts + */ +export interface AindexProjectConfig { + readonly emptyDirCleanup?: AindexEmptyDirCleanupConfig +} + +export interface AindexProjectConfigLoadResult { + readonly config: AindexProjectConfig + readonly source: string | null + readonly found: boolean +} + +export const DEFAULT_EMPTY_DIR_CLEANUP_CONFIG: AindexEmptyDirCleanupConfig = { + exclude: [] +} + +export function defineAindexProjectConfig(config: AindexProjectConfig): AindexProjectConfig { + return config +} diff --git a/sdk/src/aindex-config/AindexProjectConfigLoader.ts b/sdk/src/aindex-config/AindexProjectConfigLoader.ts new file mode 100644 index 00000000..b77d388f --- /dev/null +++ b/sdk/src/aindex-config/AindexProjectConfigLoader.ts @@ -0,0 +1,88 @@ +import type {ILogger} from '@truenine/logger' +import type {AindexProjectConfig, AindexProjectConfigLoadResult} from './AindexProjectConfig' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {createLogger} from '@truenine/logger' + +const CONFIG_FILE_NAMES = ['aindex.config.ts', 'aindex.config.mts', 'aindex.config.cts', 'aindex.config.js', 'aindex.config.mjs', 'aindex.config.cjs'] + +const DEFAULT_CONFIG: AindexProjectConfig = { + emptyDirCleanup: { + exclude: [] + } +} + +export class AindexProjectConfigLoader { + private readonly logger: ILogger + + constructor() { + this.logger = createLogger('AindexProjectConfigLoader') + } + + async loadFromDirectory(dirPath: string): Promise { + for (const configName of CONFIG_FILE_NAMES) { + const configPath = path.join(dirPath, configName) + if (fs.existsSync(configPath)) { + return this.loadFromFile(configPath) + } + } + return {config: DEFAULT_CONFIG, source: null, found: false} + } + + async loadFromFile(filePath: string): Promise { + try { + const resolvedPath = path.resolve(filePath) + + if (!fs.existsSync(resolvedPath)) { + return {config: DEFAULT_CONFIG, source: null, found: false} + } + + const mod = (await import(resolvedPath)) as Record + const rawConfig = mod != null && typeof mod === 'object' ? 'default' in mod ? mod['default'] : 'config' in mod ? mod['config'] : mod : mod + + const config = this.normalizeConfig(rawConfig) + this.logger.debug('aindex project config loaded', {source: resolvedPath}) + return {config, source: resolvedPath, found: true} + } catch (error) { + this.logger.warn({ + code: 'AINDEX_CONFIG_LOAD_FAILED', + title: 'aindex project config load failed', + rootCause: [error instanceof Error ? error.message : String(error)], + details: {path: filePath} + }) + return {config: DEFAULT_CONFIG, source: null, found: false} + } + } + + private normalizeConfig(raw: unknown): AindexProjectConfig { + if (raw == null || typeof raw !== 'object') return DEFAULT_CONFIG + const obj = raw as Record + + const edc = obj['emptyDirCleanup'] + if (edc != null && typeof edc !== 'object') return {} + + const edcObj = edc as Record + return { + emptyDirCleanup: { + exclude: toStringArray(edcObj['exclude']) + } + } + } +} + +function toStringArray(val: unknown): string[] { + if (Array.isArray(val)) return val.filter((x): x is string => typeof x === 'string') + if (typeof val === 'string') return [val] + return [] +} + +let defaultLoader: AindexProjectConfigLoader | null = null + +export function getAindexProjectConfigLoader(): AindexProjectConfigLoader { + defaultLoader ??= new AindexProjectConfigLoader() + return defaultLoader +} + +export async function loadAindexProjectConfig(dirPath: string): Promise { + return getAindexProjectConfigLoader().loadFromDirectory(dirPath) +} diff --git a/sdk/src/aindex-config/index.ts b/sdk/src/aindex-config/index.ts new file mode 100644 index 00000000..9489c4fc --- /dev/null +++ b/sdk/src/aindex-config/index.ts @@ -0,0 +1,2 @@ +export * from './AindexProjectConfig' +export * from './AindexProjectConfigLoader' diff --git a/sdk/src/aindex-project-series.ts b/sdk/src/aindex-project-series.ts new file mode 100644 index 00000000..0cfa3ddf --- /dev/null +++ b/sdk/src/aindex-project-series.ts @@ -0,0 +1,72 @@ +import type {AindexProjectSeriesName, PluginOptions} from '@/plugins/plugin-core' +import {AINDEX_PROJECT_SERIES_NAMES} from '@/plugins/plugin-core' + +export interface AindexProjectSeriesConfig { + readonly name: AindexProjectSeriesName + readonly src: string + readonly dist: string +} + +export interface AindexProjectSeriesProjectRef { + readonly projectName: string + readonly seriesName: AindexProjectSeriesName + readonly seriesDir: string +} + +export interface AindexProjectSeriesProjectNameConflict { + readonly projectName: string + readonly refs: readonly AindexProjectSeriesProjectRef[] +} + +type AindexProjectSeriesOptions = Required['aindex'] + +export function isAindexProjectSeriesName(value: string): value is AindexProjectSeriesName { + return AINDEX_PROJECT_SERIES_NAMES.includes(value as AindexProjectSeriesName) +} + +export function resolveAindexProjectSeriesConfigs( + options: Required +): readonly AindexProjectSeriesConfig[] { + return AINDEX_PROJECT_SERIES_NAMES.map(name => buildAindexProjectSeriesConfig(options.aindex, name)) +} + +export function resolveAindexProjectSeriesConfig( + options: Required, + seriesName: AindexProjectSeriesName +): AindexProjectSeriesConfig { + return buildAindexProjectSeriesConfig(options.aindex, seriesName) +} + +export function collectAindexProjectSeriesProjectNameConflicts( + refs: readonly AindexProjectSeriesProjectRef[] +): readonly AindexProjectSeriesProjectNameConflict[] { + const refsByProjectName = new Map() + + for (const ref of refs) { + const existingRefs = refsByProjectName.get(ref.projectName) + if (existingRefs == null) refsByProjectName.set(ref.projectName, [ref]) + else existingRefs.push(ref) + } + + return Array.from(refsByProjectName.entries(), ([projectName, projectRefs]) => ({ + projectName, + refs: [...projectRefs] + .sort((left, right) => left.seriesName.localeCompare(right.seriesName)) + })) + .filter(conflict => { + const uniqueSeriesNames = new Set(conflict.refs.map(ref => ref.seriesName)) + return uniqueSeriesNames.size > 1 + }) + .sort((left, right) => left.projectName.localeCompare(right.projectName)) +} + +function buildAindexProjectSeriesConfig( + aindexOptions: AindexProjectSeriesOptions, + seriesName: AindexProjectSeriesName +): AindexProjectSeriesConfig { + return { + name: seriesName, + src: aindexOptions[seriesName].src, + dist: aindexOptions[seriesName].dist + } +} diff --git a/sdk/src/bridge/mod.rs b/sdk/src/bridge/mod.rs new file mode 100644 index 00000000..ab8b3f48 --- /dev/null +++ b/sdk/src/bridge/mod.rs @@ -0,0 +1,3 @@ +//! Node.js bridge — spawns Node.js child process for plugin runtime commands. + +pub mod node; diff --git a/sdk/src/bridge/node.rs b/sdk/src/bridge/node.rs new file mode 100644 index 00000000..4681804c --- /dev/null +++ b/sdk/src/bridge/node.rs @@ -0,0 +1,555 @@ +//! Node.js process spawning for plugin runtime commands. +//! +//! Locates the bundled JS entry point and spawns `node` to execute +//! plugin-dependent commands (execute, dry-run, clean, plugins). + +use std::path::{Path, PathBuf}; +use std::process::{Command, ExitCode, Stdio}; +use std::sync::{Mutex, OnceLock}; + +use crate::{ + BridgeCommandResult, CliError, + diagnostic_helpers::{diagnostic, line, optional_details}, +}; + +use serde_json::Value; +use tnmsc_logger::create_logger; + +/// Strip Windows extended-length path prefix (`\\?\`) which Node.js cannot handle. +fn strip_win_prefix(path: PathBuf) -> PathBuf { + let s = path.to_string_lossy(); + if let Some(stripped) = s.strip_prefix(r"\\?\") { + PathBuf::from(stripped) + } else { + path + } +} + +const PACKAGE_NAME: &str = "@truenine/memory-sync-cli"; +static PLUGIN_RUNTIME_CACHE: OnceLock>> = OnceLock::new(); +static NODE_CACHE: OnceLock>> = OnceLock::new(); + +fn read_cached_success(cache: &Mutex>) -> Option { + match cache.lock() { + Ok(guard) => guard.clone(), + Err(poisoned) => poisoned.into_inner().clone(), + } +} + +fn store_cached_success(cache: &Mutex>, value: &T) { + match cache.lock() { + Ok(mut guard) => { + *guard = Some(value.clone()); + } + Err(poisoned) => { + *poisoned.into_inner() = Some(value.clone()); + } + } +} + +fn detect_with_cached_success(cache: &Mutex>, detect: F) -> Option +where + F: FnOnce() -> Option, +{ + if let Some(cached) = read_cached_success(cache) { + return Some(cached); + } + + let detected = detect(); + if let Some(value) = detected.as_ref() { + store_cached_success(cache, value); + } + detected +} + +/// Locate the plugin runtime JS entry point. +/// +/// Search order: +/// 1. `/plugin-runtime.mjs` (release archive: binary + JS co-located) +/// 2. `/../dist/plugin-runtime.mjs` (dev mode: sdk/dist/) +/// 3. `/../sdk/dist/plugin-runtime.mjs` (dev mode from repo root) +/// 4. `/../cli/dist/plugin-runtime.mjs` (published CLI shell asset copy) +/// 5. `/dist/plugin-runtime.mjs` (fallback) +/// 6. `/sdk/dist/plugin-runtime.mjs` (fallback from repo root cwd) +/// 7. `/cli/dist/plugin-runtime.mjs` (published CLI shell fallback from repo root cwd) +/// 8. npm/pnpm global install: `/@truenine/memory-sync-cli/dist/plugin-runtime.mjs` +/// 9. Embedded JS extracted to `~/.aindex/.cache/plugin-runtime-.mjs` +pub(crate) fn find_plugin_runtime() -> Option { + let cache = PLUGIN_RUNTIME_CACHE.get_or_init(|| Mutex::new(None)); + detect_with_cached_success(cache, detect_plugin_runtime) +} + +fn detect_plugin_runtime() -> Option { + let mut candidates: Vec = Vec::new(); + + // Relative to binary location + if let Ok(exe) = std::env::current_exe() + && let Some(exe_dir) = exe.parent() + { + candidates.push(exe_dir.join("plugin-runtime.mjs")); + candidates.push(exe_dir.join("../dist/plugin-runtime.mjs")); + candidates.push(exe_dir.join("../sdk/dist/plugin-runtime.mjs")); + candidates.push(exe_dir.join("../cli/dist/plugin-runtime.mjs")); + } + + // Relative to CWD + if let Ok(cwd) = std::env::current_dir() { + candidates.push(cwd.join("dist/plugin-runtime.mjs")); + candidates.push(cwd.join("sdk/dist/plugin-runtime.mjs")); + candidates.push(cwd.join("cli/dist/plugin-runtime.mjs")); + } + + // npm/pnpm global package locations + for global_root in find_npm_global_roots() { + candidates.push( + global_root + .join(PACKAGE_NAME) + .join("dist/plugin-runtime.mjs"), + ); + } + + for candidate in &candidates { + let normalized = candidate + .canonicalize() + .ok() + .unwrap_or_else(|| candidate.clone()); + if normalized.exists() { + return Some(strip_win_prefix(normalized)); + } + } + + // Last resort: extract embedded JS to cache + extract_embedded_runtime() +} + +/// Find pnpm/npm global node_modules roots. +fn find_npm_global_roots() -> Vec { + let mut roots = Vec::new(); + + // `pnpm root -g` output (preferred) + if let Some(path) = run_silent("pnpm", &["root", "-g"]) { + roots.push(PathBuf::from(path)); + } + + // `npm root -g` output + if let Some(path) = run_silent("npm", &["root", "-g"]) { + roots.push(PathBuf::from(path)); + } + + // Common fallback locations (pnpm first) + if let Some(home) = dirs::home_dir() { + roots.push(home.join("AppData/Local/pnpm/global/5/node_modules")); + roots.push(home.join("AppData/Local/pnpm/global/node_modules")); + roots.push(home.join(".local/share/pnpm/global/5/node_modules")); + roots.push(home.join(".local/share/pnpm/global/node_modules")); + roots.push(home.join("AppData/Roaming/npm/node_modules")); + roots.push(home.join(".npm-global/lib/node_modules")); + } + + // nvm-managed node paths + #[cfg(not(windows))] + if let Some(home) = dirs::home_dir() { + let nvm_dir = home.join(".nvm/versions/node"); + if let Ok(entries) = std::fs::read_dir(&nvm_dir) { + for entry in entries.flatten() { + roots.push(entry.path().join("lib/node_modules")); + } + } + } + + roots +} + +/// Run a command silently and return trimmed stdout. +fn run_silent(cmd: &str, args: &[&str]) -> Option { + Command::new(cmd) + .args(args) + .stdout(Stdio::piped()) + .stderr(Stdio::null()) + .output() + .ok() + .and_then(|o| { + if o.status.success() { + String::from_utf8(o.stdout) + .ok() + .map(|s| s.trim().to_string()) + } else { + None + } + }) + .filter(|s| !s.is_empty()) +} + +/// Embedded plugin-runtime.mjs content (set by build.rs, empty if not available). +/// This allows the standalone binary to work without an external JS file. +#[cfg(feature = "embedded-runtime")] +const EMBEDDED_RUNTIME: &str = include_str!(concat!(env!("OUT_DIR"), "/plugin-runtime.mjs")); + +/// Extract embedded JS to `~/.aindex/.cache/plugin-runtime-.mjs`. +#[cfg(not(feature = "embedded-runtime"))] +fn extract_embedded_runtime() -> Option { + None +} + +/// Extract embedded JS to `~/.aindex/.cache/plugin-runtime-.mjs`. +#[cfg(feature = "embedded-runtime")] +fn extract_embedded_runtime() -> Option { + let version = env!("CARGO_PKG_VERSION"); + let cache_dir = dirs::home_dir()?.join(".aindex/.cache"); + let cache_file = cache_dir.join(format!("plugin-runtime-{version}.mjs")); + + // Already extracted and up-to-date + if cache_file.exists() { + return Some(cache_file); + } + + // Extract + std::fs::create_dir_all(&cache_dir).ok()?; + std::fs::write(&cache_file, EMBEDDED_RUNTIME).ok()?; + Some(cache_file) +} + +/// Find the `node` executable. +pub(crate) fn find_node() -> Option { + let cache = NODE_CACHE.get_or_init(|| Mutex::new(None)); + detect_with_cached_success(cache, detect_node) +} + +fn detect_node() -> Option { + // Try `node` in PATH + if Command::new("node") + .arg("--version") + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .status() + .is_ok() + { + return Some("node".to_string()); + } + None +} + +/// Run a Node.js plugin runtime command. +/// +/// Spawns: `node [--json] [extra_args...]` +/// Inherits stdin/stdout/stderr so the Node.js process output goes directly to terminal. +pub fn run_node_command(subcommand: &str, json_mode: bool, extra_args: &[&str]) -> ExitCode { + let logger = create_logger("NodeBridge", None); + + // Find node + let node = match find_node() { + Some(n) => n, + None => { + logger.error(diagnostic( + "NODE_RUNTIME_NOT_FOUND", + "Node.js runtime is required", + line("The `node` executable was not found in PATH."), + Some(line( + "Install Node.js and reopen this shell so `node --version` succeeds.", + )), + Some(vec![line( + "If Node.js is already installed, add its install directory to PATH.", + )]), + optional_details(serde_json::json!({ "subcommand": subcommand })), + )); + return ExitCode::FAILURE; + } + }; + + // Find plugin runtime + let runtime_path = match find_plugin_runtime() { + Some(p) => p, + None => { + logger.error(diagnostic( + "PLUGIN_RUNTIME_NOT_FOUND", + "Plugin runtime entry is missing", + line("No `plugin-runtime.mjs` file was found in the expected locations."), + Some(line( + "Build `@truenine/memory-sync-sdk` or install `@truenine/memory-sync-cli` so `plugin-runtime.mjs` is available.", + )), + Some(vec![line( + "Run `pnpm -F @truenine/memory-sync-sdk build` in the repository.", + )]), + optional_details(serde_json::json!({ "subcommand": subcommand })), + )); + logger.debug( + Value::String("Searched: binary dir, CWD, npm/pnpm global, embedded cache".into()), + None, + ); + return ExitCode::FAILURE; + } + }; + + logger.debug( + Value::String("spawning node process".into()), + Some(serde_json::json!({ + "node": &node, + "runtime": runtime_path.to_string_lossy(), + "subcommand": subcommand, + "json": json_mode + })), + ); + + let mut cmd = Command::new(&node); + cmd.arg(&runtime_path); + cmd.arg(subcommand); + + if json_mode { + cmd.arg("--json"); + } + + for arg in extra_args { + cmd.arg(arg); + } + + // Inherit stdio so Node.js output goes directly to terminal + cmd.stdin(Stdio::inherit()); + cmd.stdout(Stdio::inherit()); + cmd.stderr(Stdio::inherit()); + + match cmd.status() { + Ok(status) => { + if status.success() { + ExitCode::SUCCESS + } else { + ExitCode::from(status.code().unwrap_or(1) as u8) + } + } + Err(e) => { + logger.error(diagnostic( + "NODE_PROCESS_SPAWN_FAILED", + "Failed to start the Node.js subprocess", + line("The CLI could not spawn the `node` process."), + Some(line( + "Check that `node` is runnable in this shell and retry.", + )), + None, + optional_details(serde_json::json!({ + "subcommand": subcommand, + "error": e.to_string() + })), + )); + ExitCode::FAILURE + } + } +} + +/// Library mode: capture Node.js subprocess output and return structured result. +/// +/// Used by GUI backend and other Rust callers via [`crate::run_bridge_command`]. +/// Unlike [`run_node_command`] which inherits stdio for CLI terminal use, +/// this variant pipes stdout/stderr so the caller can inspect the output. +pub fn run_node_command_captured( + subcommand: &str, + cwd: &Path, + json_mode: bool, + extra_args: &[&str], +) -> Result { + let node = find_node().ok_or(CliError::NodeNotFound)?; + let runtime_path = find_plugin_runtime() + .ok_or_else(|| CliError::PluginRuntimeNotFound( + "plugin-runtime.mjs not found. Install via 'pnpm add -g @truenine/memory-sync-cli' or place plugin-runtime.mjs next to the binary.".into(), + ))?; + + let mut cmd = Command::new(&node); + cmd.arg(&runtime_path); + cmd.arg(subcommand); + + if json_mode { + cmd.arg("--json"); + } + + for arg in extra_args { + cmd.arg(arg); + } + + cmd.current_dir(cwd); + cmd.stdout(Stdio::piped()); + cmd.stderr(Stdio::piped()); + + let output = cmd.output()?; + + let exit_code = output.status.code().unwrap_or(-1); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + + if output.status.success() || (json_mode && !stdout.trim().is_empty()) { + Ok(BridgeCommandResult { + stdout, + stderr, + exit_code, + }) + } else { + Err(CliError::NodeProcessFailed { + code: exit_code, + stderr, + }) + } +} + +/// Run the fallback: spawn `node ` with full process.argv passthrough. +/// Used when plugin-runtime.mjs is not available but index.mjs is. +#[allow(dead_code)] +pub fn run_node_fallback(args: &[String]) -> ExitCode { + let logger = create_logger("NodeBridge", None); + + let node = match find_node() { + Some(n) => n, + None => { + logger.error(diagnostic( + "NODE_RUNTIME_NOT_FOUND", + "Node.js runtime is required", + line("The `node` executable was not found in PATH."), + Some(line( + "Install Node.js and reopen this shell so `node --version` succeeds.", + )), + Some(vec![line( + "If Node.js is already installed, add its install directory to PATH.", + )]), + optional_details(serde_json::json!({ "args": args })), + )); + return ExitCode::FAILURE; + } + }; + + // Find index.mjs (the existing TS CLI entry) + let index_path = find_index_mjs(); + let runtime = match index_path { + Some(p) => p, + None => { + logger.error(diagnostic( + "CLI_ENTRY_NOT_FOUND", + "CLI JavaScript entry is missing", + line("No `index.mjs` entry point was found for the fallback Node.js launcher."), + Some(line( + "Build `@truenine/memory-sync-sdk` before running the fallback launcher.", + )), + Some(vec![line( + "Run `pnpm -F @truenine/memory-sync-sdk build` in the repository.", + )]), + optional_details(serde_json::json!({ "args": args })), + )); + return ExitCode::FAILURE; + } + }; + + let mut cmd = Command::new(&node); + cmd.arg(&runtime); + for arg in args { + cmd.arg(arg); + } + cmd.stdin(Stdio::inherit()); + cmd.stdout(Stdio::inherit()); + cmd.stderr(Stdio::inherit()); + + match cmd.status() { + Ok(status) => { + if status.success() { + ExitCode::SUCCESS + } else { + ExitCode::from(status.code().unwrap_or(1) as u8) + } + } + Err(e) => { + logger.error(diagnostic( + "NODE_PROCESS_SPAWN_FAILED", + "Failed to start the Node.js subprocess", + line("The CLI could not spawn the `node` process."), + Some(line( + "Check that `node` is runnable in this shell and retry.", + )), + None, + optional_details(serde_json::json!({ + "args": args, + "error": e.to_string() + })), + )); + ExitCode::FAILURE + } + } +} + +#[allow(dead_code)] +fn find_index_mjs() -> Option { + let candidates: Vec = { + let mut c = Vec::new(); + if let Ok(exe) = std::env::current_exe() + && let Some(exe_dir) = exe.parent() + { + c.push(exe_dir.join("index.mjs")); + c.push(exe_dir.join("../dist/index.mjs")); + c.push(exe_dir.join("../sdk/dist/index.mjs")); + c.push(exe_dir.join("../cli/dist/index.mjs")); + } + if let Ok(cwd) = std::env::current_dir() { + c.push(cwd.join("dist/index.mjs")); + c.push(cwd.join("sdk/dist/index.mjs")); + c.push(cwd.join("cli/dist/index.mjs")); + } + c + }; + + for candidate in &candidates { + let normalized = candidate + .canonicalize() + .ok() + .unwrap_or_else(|| candidate.clone()); + if normalized.exists() { + return Some(strip_win_prefix(normalized)); + } + } + None +} + +#[cfg(test)] +mod tests { + use super::*; + use std::cell::Cell; + use std::sync::Mutex; + + #[test] + fn test_strip_win_prefix_with_prefix() { + let path = PathBuf::from(r"\\?\C:\Users\test\file.mjs"); + let result = strip_win_prefix(path); + assert_eq!(result, PathBuf::from(r"C:\Users\test\file.mjs")); + } + + #[test] + fn test_strip_win_prefix_without_prefix() { + let path = PathBuf::from(r"C:\Users\test\file.mjs"); + let result = strip_win_prefix(path.clone()); + assert_eq!(result, path); + } + + #[test] + fn test_strip_win_prefix_unix_path() { + let path = PathBuf::from("/home/user/file.mjs"); + let result = strip_win_prefix(path.clone()); + assert_eq!(result, path); + } + + #[test] + fn test_detect_with_cached_success_retries_until_success() { + let cache = Mutex::new(None); + let attempts = Cell::new(0); + + let first = detect_with_cached_success(&cache, || { + attempts.set(attempts.get() + 1); + Option::::None + }); + assert_eq!(first, None); + + let second = detect_with_cached_success(&cache, || { + attempts.set(attempts.get() + 1); + Some(String::from("node")) + }); + assert_eq!(second, Some(String::from("node"))); + + let third = detect_with_cached_success(&cache, || { + attempts.set(attempts.get() + 1); + Some(String::from("other")) + }); + assert_eq!(third, Some(String::from("node"))); + assert_eq!(attempts.get(), 2); + } +} diff --git a/sdk/src/cleanup/delete-targets.ts b/sdk/src/cleanup/delete-targets.ts new file mode 100644 index 00000000..4ed5c39e --- /dev/null +++ b/sdk/src/cleanup/delete-targets.ts @@ -0,0 +1,71 @@ +import * as path from 'node:path' +import {resolveAbsolutePath} from '../ProtectedDeletionGuard' + +export interface CompactedDeletionTargets { + readonly files: string[] + readonly dirs: string[] +} + +function stripTrailingSeparator(rawPath: string): string { + const {root} = path.parse(rawPath) + if (rawPath === root) return rawPath + return rawPath.endsWith(path.sep) ? rawPath.slice(0, -1) : rawPath +} + +export function isSameOrChildDeletionPath(candidate: string, parent: string): boolean { + const normalizedCandidate = stripTrailingSeparator(candidate) + const normalizedParent = stripTrailingSeparator(parent) + if (normalizedCandidate === normalizedParent) return true + return normalizedCandidate.startsWith(`${normalizedParent}${path.sep}`) +} + +export function compactDeletionTargets( + files: readonly string[], + dirs: readonly string[] +): CompactedDeletionTargets { + const filesByKey = new Map() + const dirsByKey = new Map() + + for (const filePath of files) { + const resolvedPath = resolveAbsolutePath(filePath) + filesByKey.set(resolvedPath, resolvedPath) + } + + for (const dirPath of dirs) { + const resolvedPath = resolveAbsolutePath(dirPath) + dirsByKey.set(resolvedPath, resolvedPath) + } + + const compactedDirs = new Map() + const sortedDirEntries = [...dirsByKey.entries()].sort((a, b) => a[0].length - b[0].length) + + for (const [dirKey, dirPath] of sortedDirEntries) { + let coveredByParent = false + for (const existingParentKey of compactedDirs.keys()) { + if (isSameOrChildDeletionPath(dirKey, existingParentKey)) { + coveredByParent = true + break + } + } + + if (!coveredByParent) compactedDirs.set(dirKey, dirPath) + } + + const compactedFiles: string[] = [] + for (const [fileKey, filePath] of filesByKey) { + let coveredByDir = false + for (const dirKey of compactedDirs.keys()) { + if (isSameOrChildDeletionPath(fileKey, dirKey)) { + coveredByDir = true + break + } + } + + if (!coveredByDir) compactedFiles.push(filePath) + } + + compactedFiles.sort((a, b) => a.localeCompare(b)) + const compactedDirPaths = [...compactedDirs.values()].sort((a, b) => a.localeCompare(b)) + + return {files: compactedFiles, dirs: compactedDirPaths} +} diff --git a/sdk/src/cleanup/empty-directories.ts b/sdk/src/cleanup/empty-directories.ts new file mode 100644 index 00000000..5ea8a881 --- /dev/null +++ b/sdk/src/cleanup/empty-directories.ts @@ -0,0 +1,114 @@ +import type * as fs from 'node:fs' +import {resolveAbsolutePath} from '../ProtectedDeletionGuard' + +const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES = new Set([ + '.git', + 'node_modules', + 'dist', + 'target', + '.next', + '.turbo', + 'coverage', + '.nyc_output', + '.cache', + '.vite', + '.vite-temp', + '.pnpm-store', + '.yarn', + '.idea', + '.volumes', + 'volumes' +]) + +export interface WorkspaceEmptyDirectoryPlan { + readonly emptyDirsToDelete: string[] +} + +export interface WorkspaceEmptyDirectoryPlannerOptions { + readonly fs: typeof import('node:fs') + readonly path: typeof import('node:path') + readonly workspaceDir: string + readonly filesToDelete: readonly string[] + readonly dirsToDelete: readonly string[] +} + +function shouldSkipEmptyDirectoryTree( + nodePath: typeof import('node:path'), + workspaceDir: string, + currentDir: string +): boolean { + if (currentDir === workspaceDir) return false + return EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.has( + nodePath.basename(currentDir) + ) +} + +export function planWorkspaceEmptyDirectoryCleanup( + options: WorkspaceEmptyDirectoryPlannerOptions +): WorkspaceEmptyDirectoryPlan { + const workspaceDir = resolveAbsolutePath(options.workspaceDir) + const filesToDelete = new Set(options.filesToDelete.map(resolveAbsolutePath)) + const dirsToDelete = new Set(options.dirsToDelete.map(resolveAbsolutePath)) + const emptyDirsToDelete = new Set() + + // Track which directories are scheduled for deletion (dirsToDelete + emptyDirsToDelete) + const isScheduledForDeletion = (dirPath: string): boolean => dirsToDelete.has(dirPath) || emptyDirsToDelete.has(dirPath) + + const collectEmptyDirectories = (currentDir: string): boolean => { + if (isScheduledForDeletion(currentDir)) return true + if (shouldSkipEmptyDirectoryTree(options.path, workspaceDir, currentDir)) + { return false } + + let entries: fs.Dirent[] + try { + entries = options.fs.readdirSync(currentDir, {withFileTypes: true}) + } catch { + return false + } + + let hasRetainedEntries = false + + for (const entry of entries) { + const entryPath = resolveAbsolutePath( + options.path.join(currentDir, entry.name) + ) + + if (isScheduledForDeletion(entryPath)) continue + + if (entry.isDirectory()) { + if ( + shouldSkipEmptyDirectoryTree(options.path, workspaceDir, entryPath) + ) { + hasRetainedEntries = true + continue + } + + if (collectEmptyDirectories(entryPath)) { + emptyDirsToDelete.add(entryPath) + continue + } + + hasRetainedEntries = true + continue + } + + if (filesToDelete.has(entryPath)) continue + hasRetainedEntries = true + } + + return !hasRetainedEntries + } + + // Iteratively collect empty directories until no new ones are found + // This handles the case where deleting a child directory makes its parent empty + let previousSize = -1 + while (emptyDirsToDelete.size !== previousSize) { + previousSize = emptyDirsToDelete.size + collectEmptyDirectories(workspaceDir) + } + + return { + emptyDirsToDelete: [...emptyDirsToDelete].sort((a, b) => + a.localeCompare(b)) + } +} diff --git a/sdk/src/cli-runtime.test.ts b/sdk/src/cli-runtime.test.ts new file mode 100644 index 00000000..ab877f20 --- /dev/null +++ b/sdk/src/cli-runtime.test.ts @@ -0,0 +1,67 @@ +import {afterEach, describe, expect, it, vi} from 'vitest' + +const { + createDefaultPluginConfigMock, + pipelineRunMock, + pluginPipelineCtorMock +} = vi.hoisted(() => ({ + createDefaultPluginConfigMock: vi.fn(), + pipelineRunMock: vi.fn(), + pluginPipelineCtorMock: vi.fn() +})) + +vi.mock('./plugin.config', () => ({ + createDefaultPluginConfig: createDefaultPluginConfigMock +})) + +vi.mock('./PluginPipeline', () => ({ + PluginPipeline: function MockPluginPipeline(...args: unknown[]) { + pluginPipelineCtorMock(...args) + return { + run: pipelineRunMock + } + } +})) + +afterEach(() => { + vi.clearAllMocks() + vi.resetModules() +}) + +describe('cli runtime lightweight commands', () => { + it('does not load plugin config for --version', async () => { + const {runCli} = await import('./cli-runtime') + + const exitCode = await runCli(['node', 'tnmsc', '--version']) + + expect(exitCode).toBe(0) + expect(createDefaultPluginConfigMock).not.toHaveBeenCalled() + expect(pluginPipelineCtorMock).not.toHaveBeenCalled() + expect(pipelineRunMock).not.toHaveBeenCalled() + }) + + it('emits JSON for --version --json without loading plugin config', async () => { + const {runCli} = await import('./cli-runtime') + const writeSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + + try { + const exitCode = await runCli(['node', 'tnmsc', '--version', '--json']) + + expect(exitCode).toBe(0) + expect(createDefaultPluginConfigMock).not.toHaveBeenCalled() + expect(pluginPipelineCtorMock).not.toHaveBeenCalled() + expect(pipelineRunMock).not.toHaveBeenCalled() + + const payload = JSON.parse(String(writeSpy.mock.calls[0]?.[0])) as { + readonly success: boolean + readonly message?: string + } + + expect(payload.success).toBe(true) + expect(payload.message).toBe('Version displayed') + } + finally { + writeSpy.mockRestore() + } + }) +}) diff --git a/sdk/src/cli-runtime.ts b/sdk/src/cli-runtime.ts new file mode 100644 index 00000000..213b8bdf --- /dev/null +++ b/sdk/src/cli-runtime.ts @@ -0,0 +1,106 @@ +import type {Command, CommandContext, CommandResult} from '@/commands/Command' +import * as path from 'node:path' +import process from 'node:process' +import {JsonOutputCommand, toJsonCommandResult} from '@/commands/JsonOutputCommand' +import {buildUnhandledExceptionDiagnostic} from '@/diagnostics' +import {PluginPipeline} from '@/PluginPipeline' +import {mergeConfig} from './config' +import {extractUserArgs, parseArgs, resolveCommand} from './pipeline/CliArgumentParser' +import {createDefaultPluginConfig} from './plugin.config' +import {createLogger, drainBufferedDiagnostics, FilePathKind, setGlobalLogLevel} from './plugins/plugin-core' + +const LIGHTWEIGHT_COMMAND_NAMES = new Set(['help', 'version', 'unknown']) + +export function isJsonMode(argv: readonly string[]): boolean { + return argv.some(arg => arg === '--json' || arg === '-j' || /^-[^-]*j/.test(arg)) +} + +function writeJsonFailure(error: unknown): void { + const errorMessage = error instanceof Error ? error.message : String(error) + const logger = createLogger('main', 'silent') + logger.error(buildUnhandledExceptionDiagnostic('main', error)) + process.stdout.write(`${JSON.stringify(toJsonCommandResult({ + success: false, + filesAffected: 0, + dirsAffected: 0, + message: errorMessage + }, drainBufferedDiagnostics()))}\n`) +} + +function createUnavailableContext(kind: 'cleanup' | 'write'): never { + throw new Error(`${kind} context is unavailable for lightweight commands`) +} + +function createLightweightCommandContext(logLevel: ReturnType['logLevel']): CommandContext { + const workspaceDir = process.cwd() + const userConfigOptions = mergeConfig({ + workspaceDir, + ...logLevel != null ? {logLevel} : {} + }) + + return { + logger: createLogger('PluginPipeline', logLevel), + outputPlugins: [], + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir) + }, + projects: [] + } + }, + userConfigOptions, + createCleanContext: () => createUnavailableContext('cleanup'), + createWriteContext: () => createUnavailableContext('write') + } +} + +function resolveLightweightCommand(argv: readonly string[]): { + readonly command: Command + readonly context: CommandContext +} | undefined { + const filteredArgs = argv.filter((arg): arg is string => arg != null) + const parsedArgs = parseArgs(extractUserArgs(filteredArgs)) + let command: Command = resolveCommand(parsedArgs) + + if (!LIGHTWEIGHT_COMMAND_NAMES.has(command.name)) return void 0 + + if (parsedArgs.logLevel != null) setGlobalLogLevel(parsedArgs.logLevel) + + if (parsedArgs.jsonFlag) { + setGlobalLogLevel('silent') + command = new JsonOutputCommand(command) + } + + return { + command, + context: createLightweightCommandContext(parsedArgs.logLevel) + } +} + +export async function runCli(argv: readonly string[] = process.argv): Promise { + try { + const lightweightCommand = resolveLightweightCommand(argv) + if (lightweightCommand != null) { + const result: CommandResult = await lightweightCommand.command.execute(lightweightCommand.context) + return result.success ? 0 : 1 + } + + const pipeline = new PluginPipeline(...argv) + const userPluginConfig = await createDefaultPluginConfig(argv) + const result = await pipeline.run(userPluginConfig) + return result.success ? 0 : 1 + } + catch (error) { + if (isJsonMode(argv)) { + writeJsonFailure(error) + return 1 + } + + const logger = createLogger('main', 'error') + logger.error(buildUnhandledExceptionDiagnostic('main', error)) + return 1 + } +} diff --git a/sdk/src/commands/CleanCommand.ts b/sdk/src/commands/CleanCommand.ts new file mode 100644 index 00000000..bb8be0a8 --- /dev/null +++ b/sdk/src/commands/CleanCommand.ts @@ -0,0 +1,34 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import {performCleanup} from './CleanupUtils' + +/** + * Clean command - deletes registered output files and directories + */ +export class CleanCommand implements Command { + readonly name = 'clean' + + async execute(ctx: CommandContext): Promise { + const {logger, outputPlugins, createCleanContext} = ctx + logger.info('running clean pipeline', {command: 'clean'}) + + const cleanCtx = createCleanContext(false) + const result = await performCleanup(outputPlugins, cleanCtx, logger) + + if (result.violations.length > 0 || result.conflicts.length > 0) { + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + ...result.message != null ? {message: result.message} : {} + } + } + + logger.info('clean complete', {deletedFiles: result.deletedFiles, deletedDirs: result.deletedDirs}) + + return { + success: true, + filesAffected: result.deletedFiles, + dirsAffected: result.deletedDirs + } + } +} diff --git a/sdk/src/commands/CleanupUtils.adapter.test.ts b/sdk/src/commands/CleanupUtils.adapter.test.ts new file mode 100644 index 00000000..069ea3ab --- /dev/null +++ b/sdk/src/commands/CleanupUtils.adapter.test.ts @@ -0,0 +1,156 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it, vi} from 'vitest' +import {FilePathKind, PluginKind} from '../plugins/plugin-core' + +const nativeBindingMocks = vi.hoisted(() => ({ + planCleanup: vi.fn<(snapshotJson: string) => string>(), + performCleanup: vi.fn<(snapshotJson: string) => string>() +})) + +vi.mock('../core/native-binding', () => ({ + getNativeBinding: () => ({ + ...globalThis.__TNMSC_TEST_NATIVE_BINDING__, + planCleanup: nativeBindingMocks.planCleanup, + performCleanup: nativeBindingMocks.performCleanup + }) +})) + +const cleanupModulePromise = import('./CleanupUtils') + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createCleanContext(workspaceDir: string): OutputCleanContext { + return { + logger: createMockLogger(), + fs, + path, + glob, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [ + { + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + } + } + ] + }, + aindexDir: path.join(workspaceDir, 'aindex') + } + } as OutputCleanContext +} + +function createMockOutputPlugin(): OutputPlugin { + return { + type: PluginKind.Output, + name: 'MockOutputPlugin', + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return [{path: path.join('/tmp', 'project-a', 'AGENTS.md'), source: {}}] + }, + async declareCleanupPaths(): Promise { + return { + delete: [{kind: 'glob', path: path.join('/tmp', '.codex', 'skills', '*'), excludeBasenames: ['.system']}] + } + }, + async convertContent() { + return 'test' + } + } +} + +describe('cleanupUtils native adapter', () => { + it('uses the native cleanup bridge when it is available', async () => { + nativeBindingMocks.planCleanup.mockReset() + nativeBindingMocks.performCleanup.mockReset() + + nativeBindingMocks.planCleanup.mockReturnValue( + JSON.stringify({ + filesToDelete: ['/tmp/project-a/AGENTS.md'], + dirsToDelete: ['/tmp/.codex/skills/legacy'], + emptyDirsToDelete: ['/tmp/.codex/skills'], + violations: [], + conflicts: [], + excludedScanGlobs: ['**/.git/**'] + }) + ) + nativeBindingMocks.performCleanup.mockReturnValue( + JSON.stringify({ + deletedFiles: 1, + deletedDirs: 2, + errors: [], + violations: [], + conflicts: [], + filesToDelete: ['/tmp/project-a/AGENTS.md'], + dirsToDelete: ['/tmp/.codex/skills/legacy'], + emptyDirsToDelete: ['/tmp/.codex/skills'], + excludedScanGlobs: ['**/.git/**'] + }) + ) + + const {collectDeletionTargets, hasNativeCleanupBinding, performCleanup} = await cleanupModulePromise + const workspaceDir = path.resolve('tmp-native-cleanup-adapter') + const cleanCtx = createCleanContext(workspaceDir) + const plugin = createMockOutputPlugin() + + expect(hasNativeCleanupBinding()).toBe(true) + + const plan = await collectDeletionTargets([plugin], cleanCtx) + expect(plan).toEqual({ + filesToDelete: ['/tmp/project-a/AGENTS.md'], + dirsToDelete: ['/tmp/.codex/skills/legacy'], + emptyDirsToDelete: ['/tmp/.codex/skills'], + violations: [], + conflicts: [], + excludedScanGlobs: ['**/.git/**'] + }) + expect(nativeBindingMocks.planCleanup).toHaveBeenCalledOnce() + + const planSnapshot = JSON.parse(String(nativeBindingMocks.planCleanup.mock.calls[0]?.[0])) as { + readonly pluginSnapshots: readonly {pluginName: string, outputs: readonly string[], cleanup: {delete?: readonly {kind: string}[]}}[] + } + expect(planSnapshot.pluginSnapshots).toEqual([ + expect.objectContaining({ + pluginName: 'MockOutputPlugin', + outputs: ['/tmp/project-a/AGENTS.md'], + cleanup: expect.objectContaining({ + delete: [expect.objectContaining({kind: 'glob'})] + }) + }) + ]) + + const result = await performCleanup([plugin], cleanCtx, createMockLogger()) + expect(result).toEqual({ + deletedFiles: 1, + deletedDirs: 3, + errors: [], + violations: [], + conflicts: [] + }) + expect(nativeBindingMocks.performCleanup).toHaveBeenCalledOnce() + }) +}) diff --git a/sdk/src/commands/CleanupUtils.test.ts b/sdk/src/commands/CleanupUtils.test.ts new file mode 100644 index 00000000..9d4f9f62 --- /dev/null +++ b/sdk/src/commands/CleanupUtils.test.ts @@ -0,0 +1,782 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {FilePathKind, IDEKind, PluginKind} from '../plugins/plugin-core' +import {collectDeletionTargets, performCleanup} from './CleanupUtils' + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createRecordingLogger(): ILogger & {debugMessages: unknown[]} { + const debugMessages: unknown[] = [] + + return { + debugMessages, + trace: () => {}, + debug: message => { + debugMessages.push(message) + }, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger & {debugMessages: unknown[]} +} + +function createCleanContext( + overrides?: Partial, + pluginOptionsOverrides?: Parameters[0] +): OutputCleanContext { + const workspaceDir = path.resolve('tmp-cleanup-utils-workspace') + return { + logger: createMockLogger(), + fs, + path, + glob, + dryRun: true, + pluginOptions: mergeConfig(pluginOptionsOverrides ?? {}), + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + }, + ...overrides + } + } as OutputCleanContext +} + +function createMockOutputPlugin(name: string, outputs: readonly string[], cleanup?: OutputCleanupDeclarations): OutputPlugin { + return { + type: PluginKind.Output, + name, + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return outputs.map(output => ({path: output, source: {}})) + }, + async declareCleanupPaths() { + return cleanup ?? {} + }, + async convertContent() { + return '' + } + } +} + +describe('collectDeletionTargets', () => { + it('throws when an output path matches a protected input source file', async () => { + const editorSource = path.resolve('tmp-aindex/public/.editorconfig') + const ignoreSource = path.resolve('tmp-aindex/public/.cursorignore') + + const ctx = createCleanContext({ + editorConfigFiles: [ + { + type: IDEKind.EditorConfig, + content: 'root = true', + length: 11, + filePathKind: FilePathKind.Absolute, + dir: { + pathKind: FilePathKind.Absolute, + path: editorSource, + getDirectoryName: () => '.editorconfig' + } + } + ], + aiAgentIgnoreConfigFiles: [ + { + fileName: '.cursorignore', + content: 'node_modules', + sourcePath: ignoreSource + } + ] + }) + + const plugin = createMockOutputPlugin('MockOutputPlugin', [editorSource, ignoreSource]) + + await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') + }) + + it('keeps non-overlapping output paths for cleanup', async () => { + const outputA = path.resolve('tmp-out/a.md') + const outputB = path.resolve('tmp-out/b.md') + const ctx = createCleanContext() + const plugin = createMockOutputPlugin('MockOutputPlugin', [outputA, outputB]) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(new Set(result.filesToDelete)).toEqual(new Set([outputA, outputB])) + expect(result.violations).toEqual([]) + }) + + it('throws when an output path matches a known aindex protected config file', async () => { + const aindexDir = path.resolve('tmp-aindex') + const editorConfigOutput = path.resolve(aindexDir, 'public', '.editorconfig') + const ctx = createCleanContext({aindexDir}) + const plugin = createMockOutputPlugin('MockOutputPlugin', [editorConfigOutput]) + + await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') + }) + + it('compacts nested delete targets to reduce IO', async () => { + const claudeBaseDir = path.resolve('tmp-out/.claude') + const ruleDir = path.join(claudeBaseDir, 'rules') + const ruleFile = path.join(ruleDir, 'a.md') + const ctx = createCleanContext() + const plugin = createMockOutputPlugin('MockOutputPlugin', [ruleFile], { + delete: [ + {kind: 'directory', path: claudeBaseDir}, + {kind: 'directory', path: ruleDir}, + {kind: 'file', path: ruleFile} + ] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([claudeBaseDir]) + expect(result.filesToDelete).toEqual([]) + }) + + it('skips parent deletion when a protected child path exists', async () => { + const codexBaseDir = path.resolve('tmp-out/.codex') + const promptsDir = path.join(codexBaseDir, 'prompts') + const protectedSystemDir = path.join(codexBaseDir, 'skills', '.system') + const ctx = createCleanContext() + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [ + {kind: 'directory', path: codexBaseDir}, + {kind: 'directory', path: promptsDir} + ], + protect: [{kind: 'directory', path: protectedSystemDir}] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([promptsDir]) + expect(result.violations.map(violation => violation.targetPath)).toEqual([codexBaseDir]) + }) + + it('blocks deleting dangerous roots and returns the most specific matching rule', async () => { + const homeDir = os.homedir() + const ctx = createCleanContext() + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [{kind: 'directory', path: homeDir}] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([]) + expect(result.filesToDelete).toEqual([]) + expect(result.violations).toEqual([ + expect.objectContaining({ + targetPath: path.resolve(homeDir), + protectedPath: path.resolve('tmp-cleanup-utils-workspace', 'knowladge'), + protectionMode: 'direct' + }) + ]) + }) + + it('throws when an output path matches a built-in protected path before directory guards run', async () => { + const workspaceDir = path.resolve('tmp-workspace-root') + const projectRoot = path.join(workspaceDir, 'project-a') + const aindexDir = path.join(workspaceDir, 'aindex') + const globalAindexDir = path.join(os.homedir(), '.aindex') + const globalConfigPath = path.join(globalAindexDir, '.tnmsc.json') + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [ + { + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => projectRoot + } + } + ] + }, + aindexDir + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [globalConfigPath], { + delete: [ + {kind: 'directory', path: globalAindexDir}, + {kind: 'directory', path: workspaceDir}, + {kind: 'directory', path: projectRoot}, + {kind: 'directory', path: aindexDir} + ] + }) + + await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow( + `Cleanup protection conflict: 1 output path(s) are also protected: ${path.resolve(globalConfigPath)}` + ) + }) + + it('allows deleting non-mdx files under dist while blocking reserved dist mdx files', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-dist-mdx-')) + const workspaceDir = path.join(tempDir, 'workspace') + const distCommandDir = path.join(workspaceDir, 'aindex', 'dist', 'commands') + const projectChildFile = path.join(workspaceDir, 'project-a', 'AGENTS.md') + const protectedDistMdxFile = path.join(distCommandDir, 'demo.mdx') + const safeDistMarkdownFile = path.join(distCommandDir, 'README.md') + const globalChildDir = path.join(os.homedir(), '.aindex', '.codex', 'prompts') + const aindexSourceDir = path.join(workspaceDir, 'aindex', 'commands') + + fs.mkdirSync(path.dirname(projectChildFile), {recursive: true}) + fs.mkdirSync(distCommandDir, {recursive: true}) + fs.mkdirSync(aindexSourceDir, {recursive: true}) + fs.writeFileSync(projectChildFile, '# agent', 'utf8') + fs.writeFileSync(protectedDistMdxFile, '# compiled', 'utf8') + fs.writeFileSync(safeDistMarkdownFile, '# doc', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [ + { + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + } + } + ] + }, + aindexDir: path.join(workspaceDir, 'aindex') + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [projectChildFile, safeDistMarkdownFile], { + delete: [ + {kind: 'file', path: protectedDistMdxFile}, + {kind: 'directory', path: globalChildDir}, + {kind: 'directory', path: aindexSourceDir} + ] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(new Set(result.filesToDelete)).toEqual(new Set([path.resolve(projectChildFile), path.resolve(safeDistMarkdownFile)])) + const allDirsToDelete = [...result.dirsToDelete, ...result.emptyDirsToDelete] + expect(new Set(allDirsToDelete)).toEqual(new Set([path.resolve(globalChildDir), path.resolve(aindexSourceDir), path.resolve(workspaceDir, 'project-a')])) + expect(result.violations).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + targetPath: path.resolve(protectedDistMdxFile), + protectionMode: 'direct', + protectedPath: path.resolve(protectedDistMdxFile) + }), + expect.objectContaining({targetPath: path.resolve(aindexSourceDir)}) + ]) + ) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('blocks deleting a dist directory when protected mdx descendants exist', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-dist-dir-')) + const workspaceDir = path.join(tempDir, 'workspace') + const distCommandDir = path.join(workspaceDir, 'aindex', 'dist', 'commands') + const protectedDistMdxFile = path.join(distCommandDir, 'demo.mdx') + + fs.mkdirSync(distCommandDir, {recursive: true}) + fs.writeFileSync(protectedDistMdxFile, '# compiled', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + }, + aindexDir: path.join(workspaceDir, 'aindex') + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [{kind: 'directory', path: distCommandDir}] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([]) + expect(result.filesToDelete).toEqual([]) + expect(result.violations).toEqual([ + expect.objectContaining({ + targetPath: path.resolve(distCommandDir), + protectionMode: 'direct', + protectedPath: path.resolve(protectedDistMdxFile) + }) + ]) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('allows deleting non-mdx files under app while blocking reserved app mdx files', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-app-mdx-')) + const workspaceDir = path.join(tempDir, 'workspace') + const appDir = path.join(workspaceDir, 'aindex', 'app') + const protectedAppMdxFile = path.join(appDir, 'guide.mdx') + const safeAppMarkdownFile = path.join(appDir, 'README.md') + + fs.mkdirSync(appDir, {recursive: true}) + fs.writeFileSync(protectedAppMdxFile, '# app guide', 'utf8') + fs.writeFileSync(safeAppMarkdownFile, '# readme', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + }, + aindexDir: path.join(workspaceDir, 'aindex') + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [safeAppMarkdownFile], { + delete: [{kind: 'file', path: protectedAppMdxFile}] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.filesToDelete).toEqual([]) + expect(result.violations).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + targetPath: path.resolve(protectedAppMdxFile), + protectionMode: 'direct', + protectedPath: path.resolve(protectedAppMdxFile) + }), + expect.objectContaining({targetPath: path.resolve(safeAppMarkdownFile)}) + ]) + ) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('throws when an output file path exactly matches a cleanup protect declaration', async () => { + const outputPath = path.resolve('tmp-out/protected.md') + const ctx = createCleanContext() + const plugin = createMockOutputPlugin('MockOutputPlugin', [outputPath], { + protect: [{kind: 'file', path: outputPath}] + }) + + await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') + }) + + it('blocks deleting an app directory when protected mdx descendants exist', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-app-dir-')) + const workspaceDir = path.join(tempDir, 'workspace') + const appSubDir = path.join(workspaceDir, 'aindex', 'app', 'nested') + const protectedAppMdxFile = path.join(appSubDir, 'guide.mdx') + + fs.mkdirSync(appSubDir, {recursive: true}) + fs.writeFileSync(protectedAppMdxFile, '# app guide', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + }, + aindexDir: path.join(workspaceDir, 'aindex') + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [{kind: 'directory', path: path.join(workspaceDir, 'aindex', 'app')}] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([]) + expect(result.filesToDelete).toEqual([]) + expect(result.violations).toEqual([ + expect.objectContaining({ + targetPath: path.resolve(path.join(workspaceDir, 'aindex', 'app')), + protectionMode: 'direct', + protectedPath: path.resolve(protectedAppMdxFile) + }) + ]) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('blocks symlink targets that resolve to a protected path and keeps the most specific match', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-guard-')) + const workspaceDir = path.join(tempDir, 'workspace') + const symlinkPath = path.join(tempDir, 'workspace-link') + + fs.mkdirSync(workspaceDir, {recursive: true}) + + try { + const symlinkType: 'junction' | 'dir' = process.platform === 'win32' ? 'junction' : 'dir' + fs.symlinkSync(workspaceDir, symlinkPath, symlinkType) + + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [{kind: 'directory', path: symlinkPath}] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([]) + expect(result.violations).toEqual([ + expect.objectContaining({ + targetPath: path.resolve(symlinkPath), + protectedPath: path.resolve(path.join(workspaceDir, 'knowladge')), + protectionMode: 'direct' + }) + ]) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('lets direct protect declarations keep descendants deletable while recursive protect declarations block them', async () => { + const workspaceDir = path.resolve('tmp-direct-vs-recursive') + const directProtectedDir = path.join(workspaceDir, 'project-a') + const recursiveProtectedDir = path.join(workspaceDir, 'aindex', 'dist') + const directChildFile = path.join(directProtectedDir, 'AGENTS.md') + const recursiveChildFile = path.join(recursiveProtectedDir, 'commands', 'demo.mdx') + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [directChildFile, recursiveChildFile], { + protect: [ + {kind: 'directory', path: directProtectedDir, protectionMode: 'direct'}, + {kind: 'directory', path: recursiveProtectedDir, protectionMode: 'recursive'} + ] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.filesToDelete).toEqual([path.resolve(directChildFile)]) + expect(result.violations).toEqual([ + expect.objectContaining({ + targetPath: path.resolve(recursiveChildFile), + protectionMode: 'recursive', + protectedPath: path.resolve(recursiveProtectedDir) + }) + ]) + }) + + it('skips delete glob matches covered by excludeScanGlobs while still deleting other sibling directories', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-exclude-glob-')) + const skillsDir = path.join(tempDir, '.cursor', 'skills-cursor') + const preservedDir = path.join(skillsDir, 'create-rule') + const staleDir = path.join(skillsDir, 'legacy-skill') + + fs.mkdirSync(preservedDir, {recursive: true}) + fs.mkdirSync(staleDir, {recursive: true}) + fs.writeFileSync(path.join(preservedDir, 'SKILL.md'), '# preserved', 'utf8') + fs.writeFileSync(path.join(staleDir, 'SKILL.md'), '# stale', 'utf8') + + try { + const ctx = createCleanContext() + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [{kind: 'glob', path: path.join(skillsDir, '*')}], + protect: [{kind: 'directory', path: preservedDir}], + excludeScanGlobs: [preservedDir, path.join(preservedDir, '**')] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([path.resolve(staleDir)]) + expect(result.filesToDelete).toEqual([]) + expect(result.violations).toEqual([]) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('throws when an output path matches the configured workspace prompt source file', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-workspace-src-')) + const workspaceDir = path.join(tempDir, 'workspace') + const aindexDir = path.join(workspaceDir, 'aindex-meta') + const workspacePromptSource = path.join(aindexDir, 'meta', 'workspace.src.mdx') + + fs.mkdirSync(path.dirname(workspacePromptSource), {recursive: true}) + fs.writeFileSync(workspacePromptSource, '# workspace', 'utf8') + + try { + const ctx = createCleanContext( + { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + }, + aindexDir + }, + { + workspaceDir, + aindex: { + dir: 'aindex-meta', + workspacePrompt: { + src: 'meta/workspace.src.mdx', + dist: 'compiled/workspace.mdx' + } + } + } as Parameters[0] + ) + const plugin = createMockOutputPlugin('MockOutputPlugin', [workspacePromptSource]) + + await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('plans workspace empty directories while skipping excluded trees and symlink entries', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-empty-sweep-')) + const workspaceDir = path.join(tempDir, 'workspace') + const sourceLeafDir = path.join(workspaceDir, 'source', 'empty', 'leaf') + const sourceKeepFile = path.join(workspaceDir, 'source', 'keep.md') + const distEmptyDir = path.join(workspaceDir, 'dist', 'ghost') + const nodeModulesEmptyDir = path.join(workspaceDir, 'node_modules', 'pkg', 'ghost') + const gitEmptyDir = path.join(workspaceDir, '.git', 'objects', 'info') + const symlinkTarget = path.join(tempDir, 'symlink-target') + const symlinkParentDir = path.join(workspaceDir, 'symlink-parent') + const symlinkPath = path.join(symlinkParentDir, 'linked') + + fs.mkdirSync(sourceLeafDir, {recursive: true}) + fs.mkdirSync(path.dirname(sourceKeepFile), {recursive: true}) + fs.mkdirSync(distEmptyDir, {recursive: true}) + fs.mkdirSync(nodeModulesEmptyDir, {recursive: true}) + fs.mkdirSync(gitEmptyDir, {recursive: true}) + fs.mkdirSync(symlinkTarget, {recursive: true}) + fs.mkdirSync(symlinkParentDir, {recursive: true}) + fs.writeFileSync(sourceKeepFile, '# keep', 'utf8') + + try { + const symlinkType: 'junction' | 'dir' = process.platform === 'win32' ? 'junction' : 'dir' + fs.symlinkSync(symlinkTarget, symlinkPath, symlinkType) + + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', []) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.filesToDelete).toEqual([]) + expect(result.dirsToDelete).toEqual([]) + expect(result.emptyDirsToDelete).toEqual([path.resolve(workspaceDir, 'source', 'empty'), path.resolve(sourceLeafDir)]) + expect(result.emptyDirsToDelete).not.toContain(path.resolve(workspaceDir)) + expect(result.emptyDirsToDelete).not.toContain(path.resolve(distEmptyDir)) + expect(result.emptyDirsToDelete).not.toContain(path.resolve(nodeModulesEmptyDir)) + expect(result.emptyDirsToDelete).not.toContain(path.resolve(gitEmptyDir)) + expect(result.emptyDirsToDelete).not.toContain(path.resolve(symlinkParentDir)) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) + +describe('performCleanup', () => { + it('deletes files and directories in one cleanup pass', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-perform-cleanup-')) + const outputFile = path.join(tempDir, 'project-a', 'AGENTS.md') + const outputDir = path.join(tempDir, '.codex', 'prompts') + const stalePrompt = path.join(outputDir, 'demo.md') + + fs.mkdirSync(path.dirname(outputFile), {recursive: true}) + fs.mkdirSync(outputDir, {recursive: true}) + fs.writeFileSync(outputFile, '# agent', 'utf8') + fs.writeFileSync(stalePrompt, '# prompt', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: tempDir, + getDirectoryName: () => path.basename(tempDir), + getAbsolutePath: () => tempDir + }, + projects: [] + } + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [outputFile], { + delete: [{kind: 'directory', path: outputDir}] + }) + + const result = await performCleanup([plugin], ctx, createMockLogger()) + + expect(result).toEqual( + expect.objectContaining({ + deletedFiles: 1, + deletedDirs: 3, + errors: [], + violations: [], + conflicts: [] + }) + ) + expect(fs.existsSync(outputFile)).toBe(false) + expect(fs.existsSync(outputDir)).toBe(false) + expect(fs.existsSync(path.dirname(outputFile))).toBe(false) + expect(fs.existsSync(path.dirname(outputDir))).toBe(false) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('logs aggregated cleanup execution summaries instead of per-path success logs', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-perform-cleanup-logging-')) + const outputFile = path.join(tempDir, 'project-a', 'AGENTS.md') + const outputDir = path.join(tempDir, '.codex', 'prompts') + const stalePrompt = path.join(outputDir, 'demo.md') + const logger = createRecordingLogger() + + fs.mkdirSync(path.dirname(outputFile), {recursive: true}) + fs.mkdirSync(outputDir, {recursive: true}) + fs.writeFileSync(outputFile, '# agent', 'utf8') + fs.writeFileSync(stalePrompt, '# prompt', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: tempDir, + getDirectoryName: () => path.basename(tempDir), + getAbsolutePath: () => tempDir + }, + projects: [] + } + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [outputFile], { + delete: [{kind: 'directory', path: outputDir}] + }) + + await performCleanup([plugin], ctx, logger) + + expect(logger.debugMessages).toEqual( + expect.arrayContaining(['cleanup plan built', 'cleanup delete execution started', 'cleanup delete execution complete']) + ) + expect(logger.debugMessages).not.toContainEqual(expect.objectContaining({path: outputFile})) + expect(logger.debugMessages).not.toContainEqual(expect.objectContaining({path: outputDir})) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('deletes generated files and then prunes workspace empty directories', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-perform-cleanup-empty-sweep-')) + const outputFile = path.join(tempDir, 'generated', 'AGENTS.md') + const emptyLeafDir = path.join(tempDir, 'scratch', 'empty', 'leaf') + const retainedScratchFile = path.join(tempDir, 'scratch', 'keep.md') + + fs.mkdirSync(path.dirname(outputFile), {recursive: true}) + fs.mkdirSync(emptyLeafDir, {recursive: true}) + fs.mkdirSync(path.dirname(retainedScratchFile), {recursive: true}) + fs.writeFileSync(outputFile, '# agent', 'utf8') + fs.writeFileSync(retainedScratchFile, '# keep', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: tempDir, + getDirectoryName: () => path.basename(tempDir), + getAbsolutePath: () => tempDir + }, + projects: [] + } + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [outputFile]) + + const result = await performCleanup([plugin], ctx, createMockLogger()) + + expect(result).toEqual( + expect.objectContaining({ + deletedFiles: 1, + deletedDirs: 3, + errors: [], + violations: [], + conflicts: [] + }) + ) + expect(fs.existsSync(outputFile)).toBe(false) + expect(fs.existsSync(path.dirname(outputFile))).toBe(false) + expect(fs.existsSync(path.join(tempDir, 'scratch', 'empty', 'leaf'))).toBe(false) + expect(fs.existsSync(path.join(tempDir, 'scratch', 'empty'))).toBe(false) + expect(fs.existsSync(path.join(tempDir, 'scratch'))).toBe(true) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/commands/CleanupUtils.ts b/sdk/src/commands/CleanupUtils.ts new file mode 100644 index 00000000..9c80a82c --- /dev/null +++ b/sdk/src/commands/CleanupUtils.ts @@ -0,0 +1,462 @@ +import type { + ILogger, + OutputCleanContext, + OutputCleanupDeclarations, + OutputCleanupPathDeclaration, + OutputFileDeclaration, + OutputPlugin, + PluginOptions +} from '../plugins/plugin-core' +import type {ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard' +import {buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines} from '@/diagnostics' +import {loadAindexProjectConfig} from '../aindex-config/AindexProjectConfigLoader' +import {getNativeBinding} from '../core/native-binding' +import {collectAllPluginOutputs} from '../plugins/plugin-core' +import { + collectConfiguredAindexInputRules, + collectProjectRoots, + collectProtectedInputSourceRules, + logProtectedDeletionGuardError +} from '../ProtectedDeletionGuard' + +let nativeCleanupBindingCheck: boolean | null = null + +export interface CleanupResult { + readonly deletedFiles: number + readonly deletedDirs: number + readonly errors: readonly CleanupError[] + readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[] + readonly conflicts: readonly CleanupProtectionConflict[] + readonly message?: string +} + +export interface CleanupError { + readonly path: string + readonly type: 'file' | 'directory' + readonly error: unknown +} + +export interface CleanupProtectionConflict { + readonly outputPath: string + readonly outputPlugin: string + readonly protectedPath: string + readonly protectionMode: ProtectionMode + readonly protectedBy: string + readonly reason: string +} + +export class CleanupProtectionConflictError extends Error { + readonly conflicts: readonly CleanupProtectionConflict[] + + constructor(conflicts: readonly CleanupProtectionConflict[]) { + super(buildCleanupProtectionConflictMessage(conflicts)) + this.name = 'CleanupProtectionConflictError' + this.conflicts = conflicts + } +} + +interface NativeCleanupBinding { + readonly planCleanup?: (snapshotJson: string) => string | Promise + readonly performCleanup?: (snapshotJson: string) => string | Promise +} + +type NativeProtectionMode = 'direct' | 'recursive' +type NativeProtectionRuleMatcher = 'path' | 'glob' +type NativeCleanupTargetKind = 'file' | 'directory' | 'glob' +type NativeCleanupErrorKind = 'file' | 'directory' + +interface NativeCleanupTarget { + readonly path: string + readonly kind: NativeCleanupTargetKind + readonly excludeBasenames?: readonly string[] + readonly protectionMode?: NativeProtectionMode + readonly scope?: string + readonly label?: string +} + +interface NativeCleanupDeclarations { + readonly delete?: readonly NativeCleanupTarget[] + readonly protect?: readonly NativeCleanupTarget[] + readonly excludeScanGlobs?: readonly string[] +} + +interface NativePluginCleanupSnapshot { + readonly pluginName: string + readonly outputs: readonly string[] + readonly cleanup: NativeCleanupDeclarations +} + +interface NativeProtectedRule { + readonly path: string + readonly protectionMode: NativeProtectionMode + readonly reason: string + readonly source: string + readonly matcher?: NativeProtectionRuleMatcher | undefined +} + +interface NativeCleanupSnapshot { + readonly workspaceDir: string + readonly aindexDir?: string + readonly projectRoots: readonly string[] + readonly protectedRules: readonly NativeProtectedRule[] + readonly pluginSnapshots: readonly NativePluginCleanupSnapshot[] + readonly emptyDirExcludeGlobs?: readonly string[] +} + +interface NativeProtectedPathViolation { + readonly targetPath: string + readonly protectedPath: string + readonly protectionMode: NativeProtectionMode + readonly reason: string + readonly source: string +} + +interface NativeCleanupProtectionConflict { + readonly outputPath: string + readonly outputPlugin: string + readonly protectedPath: string + readonly protectionMode: NativeProtectionMode + readonly protectedBy: string + readonly reason: string +} + +interface NativeCleanupPlan { + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly emptyDirsToDelete: string[] + readonly violations: readonly NativeProtectedPathViolation[] + readonly conflicts: readonly NativeCleanupProtectionConflict[] + readonly excludedScanGlobs: string[] +} + +interface NativeCleanupError { + readonly path: string + readonly kind: NativeCleanupErrorKind + readonly error: string +} + +interface NativeCleanupResult { + readonly deletedFiles: number + readonly deletedDirs: number + readonly errors: readonly NativeCleanupError[] + readonly violations: readonly NativeProtectedPathViolation[] + readonly conflicts: readonly NativeCleanupProtectionConflict[] + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly emptyDirsToDelete: string[] + readonly excludedScanGlobs: string[] +} + +export function hasNativeCleanupBinding(): boolean { + if (nativeCleanupBindingCheck !== null) { + return nativeCleanupBindingCheck + } + const nativeBinding = getNativeBinding() + nativeCleanupBindingCheck = nativeBinding?.planCleanup != null && nativeBinding.performCleanup != null + return nativeCleanupBindingCheck +} + +function requireNativeCleanupBinding(): NativeCleanupBinding { + const nativeBinding = getNativeBinding() + if (nativeBinding == null) { + throw new Error('Native cleanup binding is required. Build or install the Rust NAPI package before running tnmsc.') + } + return nativeBinding +} + +function mapProtectionMode(mode: ProtectionMode): NativeProtectionMode { + return mode +} + +function mapProtectionRuleMatcher(matcher: ProtectionRuleMatcher | undefined): NativeProtectionRuleMatcher | undefined { + return matcher +} + +function mapCleanupTarget(target: OutputCleanupPathDeclaration): NativeCleanupTarget { + return { + path: target.path, + kind: target.kind, + ...target.excludeBasenames != null && target.excludeBasenames.length > 0 ? {excludeBasenames: [...target.excludeBasenames]} : {}, + ...target.protectionMode != null ? {protectionMode: mapProtectionMode(target.protectionMode)} : {}, + ...target.scope != null ? {scope: target.scope} : {}, + ...target.label != null ? {label: target.label} : {} + } +} + +async function collectPluginCleanupDeclarations(plugin: OutputPlugin, cleanCtx: OutputCleanContext): Promise { + if (plugin.declareCleanupPaths == null) return {} + return plugin.declareCleanupPaths({...cleanCtx, dryRun: true}) +} + +async function collectPluginCleanupSnapshot( + plugin: OutputPlugin, + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise { + const existingOutputDeclarations = predeclaredOutputs?.get(plugin) + const [outputs, cleanup] = await Promise.all([ + existingOutputDeclarations != null ? Promise.resolve(existingOutputDeclarations) : plugin.declareOutputFiles({...cleanCtx, dryRun: true}), + collectPluginCleanupDeclarations(plugin, cleanCtx) + ]) + + return { + pluginName: plugin.name, + outputs: outputs.map(output => output.path), + cleanup: { + ...cleanup.delete != null && cleanup.delete.length > 0 ? {delete: cleanup.delete.map(mapCleanupTarget)} : {}, + ...cleanup.protect != null && cleanup.protect.length > 0 ? {protect: cleanup.protect.map(mapCleanupTarget)} : {}, + ...cleanup.excludeScanGlobs != null && cleanup.excludeScanGlobs.length > 0 ? {excludeScanGlobs: [...cleanup.excludeScanGlobs]} : {} + } + } +} + +function collectConfiguredCleanupProtectionRules(cleanCtx: OutputCleanContext): NativeProtectedRule[] { + return (cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []).map(rule => ({ + path: rule.path, + protectionMode: mapProtectionMode(rule.protectionMode), + reason: rule.reason ?? 'configured cleanup protection rule', + source: 'configured-cleanup-protection', + matcher: mapProtectionRuleMatcher(rule.matcher ?? 'path') + })) +} + +function buildCleanupProtectionConflictMessage(conflicts: readonly NativeCleanupProtectionConflict[]): string { + const pathList = conflicts.map(conflict => conflict.outputPath).join(', ') + return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}` +} + +function logCleanupProtectionConflicts(logger: ILogger, conflicts: readonly NativeCleanupProtectionConflict[]): void { + const firstConflict = conflicts[0] + + logger.error( + buildDiagnostic({ + code: 'CLEANUP_PROTECTION_CONFLICT_DETECTED', + title: 'Cleanup output paths conflict with protected inputs', + rootCause: diagnosticLines( + `tnmsc found ${conflicts.length} output path(s) that also match protected cleanup rules.`, + firstConflict == null + ? 'No conflict details were captured.' + : `Example conflict: "${firstConflict.outputPath}" is protected by "${firstConflict.protectedPath}".` + ), + exactFix: diagnosticLines('Separate generated output paths from protected source or reserved workspace paths before running cleanup again.'), + possibleFixes: [ + diagnosticLines('Update cleanup protect declarations so they do not overlap generated outputs.'), + diagnosticLines('Move the conflicting output target to a generated-only directory.') + ], + details: { + count: conflicts.length, + conflicts + } + }) + ) +} + +function logCleanupPlanDiagnostics( + logger: ILogger, + plan: Pick< + NativeCleanupPlan | NativeCleanupResult, + 'filesToDelete' | 'dirsToDelete' | 'emptyDirsToDelete' | 'violations' | 'conflicts' | 'excludedScanGlobs' + > +): void { + logger.debug('cleanup plan built', { + filesToDelete: plan.filesToDelete.length, + dirsToDelete: plan.dirsToDelete.length + plan.emptyDirsToDelete.length, + emptyDirsToDelete: plan.emptyDirsToDelete.length, + violations: plan.violations.length, + conflicts: plan.conflicts.length, + excludedScanGlobs: plan.excludedScanGlobs + }) +} + +function logNativeCleanupErrors( + logger: ILogger, + errors: readonly NativeCleanupError[] +): readonly {path: string, type: 'file' | 'directory', error: string}[] { + return errors.map(currentError => { + const type = currentError.kind === 'directory' ? 'directory' : 'file' + logger.warn( + buildFileOperationDiagnostic({ + code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED', + title: type === 'file' ? 'Cleanup could not delete a file' : 'Cleanup could not delete a directory', + operation: 'delete', + targetKind: type, + path: currentError.path, + error: currentError.error, + details: { + phase: 'cleanup' + } + }) + ) + + return {path: currentError.path, type, error: currentError.error} + }) +} + +async function buildCleanupSnapshot( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise { + const pluginSnapshots = await Promise.all(outputPlugins.map(async plugin => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs))) + + const protectedRules: NativeProtectedRule[] = [] + for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) { + protectedRules.push({ + path: rule.path, + protectionMode: mapProtectionMode(rule.protectionMode), + reason: rule.reason, + source: rule.source, + ...rule.matcher != null ? {matcher: mapProtectionRuleMatcher(rule.matcher)} : {} + }) + } + + if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) { + for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, { + workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path + })) { + protectedRules.push({ + path: rule.path, + protectionMode: mapProtectionMode(rule.protectionMode), + reason: rule.reason, + source: rule.source, + ...rule.matcher != null ? {matcher: mapProtectionRuleMatcher(rule.matcher)} : {} + }) + } + } + + protectedRules.push(...collectConfiguredCleanupProtectionRules(cleanCtx)) + + // Load aindex project config (aindex.config.ts) for empty-dir exclude globs + let emptyDirExcludeGlobs: string[] | undefined + if (cleanCtx.collectedOutputContext.aindexDir != null) { + const aindexConfig = await loadAindexProjectConfig(cleanCtx.collectedOutputContext.aindexDir) + if (aindexConfig.found) { + const exclude = aindexConfig.config.emptyDirCleanup?.exclude + if (exclude != null && exclude.length > 0) { + emptyDirExcludeGlobs = [...exclude] + } + } + } + + return { + workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, + ...cleanCtx.collectedOutputContext.aindexDir != null ? {aindexDir: cleanCtx.collectedOutputContext.aindexDir} : {}, + projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext), + protectedRules, + pluginSnapshots, + ...emptyDirExcludeGlobs != null && emptyDirExcludeGlobs.length > 0 ? {emptyDirExcludeGlobs} : {} + } +} + +function parseNativeJson(json: string): T { + return JSON.parse(json) as T +} + +export async function planCleanupWithNative(snapshot: NativeCleanupSnapshot): Promise { + const nativeBinding = requireNativeCleanupBinding() + if (nativeBinding?.planCleanup == null) throw new Error('Native cleanup planning is unavailable') + const result = await Promise.resolve(nativeBinding.planCleanup(JSON.stringify(snapshot))) + return parseNativeJson(result) +} + +export async function performCleanupWithNative(snapshot: NativeCleanupSnapshot): Promise { + const nativeBinding = requireNativeCleanupBinding() + if (nativeBinding?.performCleanup == null) throw new Error('Native cleanup execution is unavailable') + const result = await Promise.resolve(nativeBinding.performCleanup(JSON.stringify(snapshot))) + return parseNativeJson(result) +} + +export async function collectDeletionTargets( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise<{ + filesToDelete: string[] + dirsToDelete: string[] + emptyDirsToDelete: string[] + violations: import('../ProtectedDeletionGuard').ProtectedPathViolation[] + conflicts: CleanupProtectionConflict[] + excludedScanGlobs: string[] +}> { + const snapshot = await buildCleanupSnapshot(outputPlugins, cleanCtx, predeclaredOutputs) + const plan = await planCleanupWithNative(snapshot) + + if (plan.conflicts.length > 0) { + throw new CleanupProtectionConflictError(plan.conflicts) + } + + return { + filesToDelete: plan.filesToDelete, + dirsToDelete: plan.dirsToDelete.sort((a, b) => a.localeCompare(b)), + emptyDirsToDelete: plan.emptyDirsToDelete.sort((a, b) => a.localeCompare(b)), + violations: [...plan.violations], + conflicts: [], + excludedScanGlobs: plan.excludedScanGlobs + } +} + +export async function performCleanup( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + logger: ILogger, + predeclaredOutputs?: ReadonlyMap +): Promise { + if (predeclaredOutputs != null) { + const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx, predeclaredOutputs) + logger.debug('Collected outputs for cleanup', { + projectDirs: outputs.projectDirs.length, + projectFiles: outputs.projectFiles.length, + globalDirs: outputs.globalDirs.length, + globalFiles: outputs.globalFiles.length + }) + } + + const snapshot = await buildCleanupSnapshot(outputPlugins, cleanCtx, predeclaredOutputs) + const result = await performCleanupWithNative(snapshot) + + logCleanupPlanDiagnostics(logger, result) + + if (result.conflicts.length > 0) { + logCleanupProtectionConflicts(logger, result.conflicts) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: [], + conflicts: result.conflicts, + message: buildCleanupProtectionConflictMessage(result.conflicts) + } + } + + if (result.violations.length > 0) { + logProtectedDeletionGuardError(logger, 'cleanup', result.violations) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: result.violations, + conflicts: [], + message: `Protected deletion guard blocked cleanup for ${result.violations.length} path(s)` + } + } + + logger.debug('cleanup delete execution started', { + filesToDelete: result.filesToDelete.length, + dirsToDelete: result.dirsToDelete.length + result.emptyDirsToDelete.length, + emptyDirsToDelete: result.emptyDirsToDelete.length + }) + const loggedErrors = logNativeCleanupErrors(logger, result.errors) + logger.debug('cleanup delete execution complete', { + deletedFiles: result.deletedFiles, + deletedDirs: result.deletedDirs, + errors: loggedErrors.length + }) + + return { + deletedFiles: result.deletedFiles, + deletedDirs: result.deletedDirs + result.emptyDirsToDelete.length, + errors: loggedErrors, + violations: [], + conflicts: [] + } +} diff --git a/sdk/src/commands/Command.ts b/sdk/src/commands/Command.ts new file mode 100644 index 00000000..7f83bc06 --- /dev/null +++ b/sdk/src/commands/Command.ts @@ -0,0 +1,95 @@ +import type {ILogger, LoggerDiagnosticRecord} from '@truenine/logger' +import type { + OutputCleanContext, + OutputCollectedContext, + OutputPlugin, + OutputWriteContext, + PluginOptions, + UserConfigFile +} from '../plugins/plugin-core' + +/** + * Command execution context + */ +export interface CommandContext { + readonly logger: ILogger + readonly outputPlugins: readonly OutputPlugin[] + readonly collectedOutputContext: OutputCollectedContext + readonly userConfigOptions: Required + readonly createCleanContext: (dryRun: boolean) => OutputCleanContext + readonly createWriteContext: (dryRun: boolean) => OutputWriteContext +} + +/** + * Command execution result + */ +export interface CommandResult { + readonly success: boolean + readonly filesAffected: number + readonly dirsAffected: number + readonly message?: string +} + +/** + * Per-plugin execution result for JSON output mode. + * Captures individual plugin execution status, timing, and error details. + */ +export interface PluginExecutionResult { + readonly pluginName: string + readonly kind: 'Input' | 'Output' + readonly status: 'success' | 'failed' | 'skipped' + readonly filesWritten?: number + readonly error?: string + readonly duration?: number +} + +/** + * Structured JSON output for command execution results. + * Extends CommandResult with per-plugin details and error aggregation + * for consumption by Tauri sidecar / external tooling. + */ +export interface JsonCommandResult { + readonly success: boolean + readonly filesAffected: number + readonly dirsAffected: number + readonly message?: string + readonly pluginResults: readonly PluginExecutionResult[] + readonly warnings: readonly LoggerDiagnosticRecord[] + readonly errors: readonly LoggerDiagnosticRecord[] +} + +/** + * JSON output for configuration information. + * Contains the merged config and the source layers that contributed to it. + */ +export interface JsonConfigInfo { + readonly merged: UserConfigFile + readonly sources: readonly ConfigSource[] +} + +/** + * Describes a single configuration source layer. + */ +export interface ConfigSource { + readonly path: string + readonly layer: 'programmatic' | 'global' | 'default' + readonly config: Partial +} + +/** + * JSON output for plugin information listing. + */ +export interface JsonPluginInfo { + readonly name: string + readonly kind: 'Input' | 'Output' + readonly description: string + readonly dependencies: readonly string[] +} + +/** + * Base command interface + */ +export interface Command { + readonly name: string + execute: (ctx: CommandContext) => Promise +} diff --git a/sdk/src/commands/CommandFactory.ts b/sdk/src/commands/CommandFactory.ts new file mode 100644 index 00000000..3604485f --- /dev/null +++ b/sdk/src/commands/CommandFactory.ts @@ -0,0 +1,29 @@ +import type {Command} from './Command' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' + +/** + * Command factory interface + * Each factory knows how to create a specific command based on CLI args + */ +export interface CommandFactory { + canHandle: (args: ParsedCliArgs) => boolean + + createCommand: (args: ParsedCliArgs) => Command +} + +/** + * Priority levels for command factory resolution + * Lower number = higher priority + */ +export enum FactoryPriority { + Flags = 0, // --version, --help flags (highest priority) + Unknown = 1, // Unknown command handling + Subcommand = 2 // Named subcommands +} + +/** + * Extended factory interface with priority + */ +export interface PrioritizedCommandFactory extends CommandFactory { + readonly priority: FactoryPriority +} diff --git a/sdk/src/commands/CommandRegistry.ts b/sdk/src/commands/CommandRegistry.ts new file mode 100644 index 00000000..91d16351 --- /dev/null +++ b/sdk/src/commands/CommandRegistry.ts @@ -0,0 +1,43 @@ +import type {Command} from './Command' +import type {CommandFactory, PrioritizedCommandFactory} from './CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {FactoryPriority} from './CommandFactory' + +/** + * Command registry that manages command factories + * Uses priority-based resolution for factory selection + */ +export class CommandRegistry { + private readonly factories: PrioritizedCommandFactory[] = [] + + register(factory: PrioritizedCommandFactory): void { + this.factories.push(factory) + this.factories.sort((a, b) => a.priority - b.priority) // Sort by priority (lower number = higher priority) + } + + registerWithPriority(factory: CommandFactory, priority: FactoryPriority): void { + const prioritized: PrioritizedCommandFactory = { // Create a wrapper that delegates to the original factory while adding priority + priority, + canHandle: (args: ParsedCliArgs) => factory.canHandle(args), + createCommand: (args: ParsedCliArgs) => factory.createCommand(args) + } + this.factories.push(prioritized) + this.factories.sort((a, b) => a.priority - b.priority) + } + + resolve(args: ParsedCliArgs): Command { + for (const factory of this.factories) { // First pass: check prioritized factories (flags, unknown commands) + if (factory.priority <= FactoryPriority.Unknown && factory.canHandle(args)) return factory.createCommand(args) + } + + for (const factory of this.factories) { // Second pass: check subcommand factories + if (factory.priority === FactoryPriority.Subcommand && factory.canHandle(args)) return factory.createCommand(args) + } + + for (const factory of this.factories) { // Third pass: use catch-all factory (ExecuteCommandFactory) + if (factory.canHandle(args)) return factory.createCommand(args) + } + + throw new Error('No command factory found for the given arguments') // This should never happen if ExecuteCommandFactory is registered + } +} diff --git a/sdk/src/commands/CommandUtils.ts b/sdk/src/commands/CommandUtils.ts new file mode 100644 index 00000000..a1b522f9 --- /dev/null +++ b/sdk/src/commands/CommandUtils.ts @@ -0,0 +1,70 @@ +/** + * Result summary from aggregating plugin outputs + */ +export interface AggregatedResults { + readonly totalFiles: number + readonly totalDirs: number +} + +/** + * Aggregate file and directory counts from plugin results. + * + * @param results - Map of plugin name to their write results + * @returns Aggregated counts of files and directories + */ +export function aggregatePluginResults( + results: Map +): AggregatedResults { + let totalFiles = 0 + let totalDirs = 0 + + for (const result of results.values()) { + totalFiles += result.files.length + totalDirs += result.dirs.length + } + + return {totalFiles, totalDirs} +} + +/** + * Create a standard CommandResult object. + * Centralizes the result object creation pattern used across commands. + * + * @param success - Whether the command succeeded + * @param filesAffected - Number of files affected + * @param dirsAffected - Number of directories affected + * @param message - Optional message + */ +export function createCommandResult( + success: boolean, + filesAffected: number, + dirsAffected: number, + message?: string +): {success: boolean, filesAffected: number, dirsAffected: number, message?: string} { + return message != null + ? {success, filesAffected, dirsAffected, message} + : {success, filesAffected, dirsAffected} +} + +/** + * Log plugin results with a consistent format. + * + * @param results - Map of plugin name to their results + * @param logger - Logger instance for output + * @param logger.info - Logger info method + * @param dryRun - Whether this is a dry-run execution + */ +export function logPluginResults( + results: Map, + logger: {info: (msg: string, meta?: object) => void}, + dryRun: boolean = false +): void { + for (const [pluginName, result] of results) { + logger.info('plugin result', { + plugin: pluginName, + files: result.files.length, + dirs: result.dirs.length, + ...dryRun && {dryRun: true} + }) + } +} diff --git a/sdk/src/commands/ConfigCommand.ts b/sdk/src/commands/ConfigCommand.ts new file mode 100644 index 00000000..68b10277 --- /dev/null +++ b/sdk/src/commands/ConfigCommand.ts @@ -0,0 +1,237 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import type {AindexConfigKeyPath} from '@/plugins/plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' +import {AINDEX_CONFIG_KEY_PATHS} from '@/plugins/plugin-core' +import {getRequiredGlobalConfigPath} from '@/runtime-environment' + +/** + * Valid configuration keys that can be set via `tnmsc config key=value`. + * Nested keys use dot-notation: aindex.skills.src, aindex.commands.src, etc. + */ +type ValidConfigKey = 'workspaceDir' | 'logLevel' | AindexConfigKeyPath + +const VALID_CONFIG_KEYS: readonly ValidConfigKey[] = [ + 'workspaceDir', + ...AINDEX_CONFIG_KEY_PATHS, + 'logLevel' +] + +/** + * Validate if a key is a valid config key + */ +function isValidConfigKey(key: string): key is ValidConfigKey { + return VALID_CONFIG_KEYS.includes(key as ValidConfigKey) +} + +/** + * Validate log level value + */ +function isValidLogLevel(value: string): boolean { + const validLevels = ['trace', 'debug', 'info', 'warn', 'error'] + return validLevels.includes(value) +} + +/** + * Get global config file path + */ +function getGlobalConfigPath(): string { + return getRequiredGlobalConfigPath() +} + +/** + * Read global config file + */ +function readGlobalConfig(): ConfigObject { + const configPath = getGlobalConfigPath() + if (!fs.existsSync(configPath)) return {} + try { + const content = fs.readFileSync(configPath, 'utf8') + return JSON.parse(content) as ConfigObject + } + catch { + return {} + } +} + +/** + * Write global config file + */ +function writeGlobalConfig(config: ConfigObject): void { + const configPath = getGlobalConfigPath() + const configDir = path.dirname(configPath) + + if (!fs.existsSync(configDir)) fs.mkdirSync(configDir, {recursive: true}) // Ensure directory exists + + fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, 'utf8') // Write with pretty formatting +} + +type ConfigValue = string | ConfigObject +interface ConfigObject { + [key: string]: ConfigValue | undefined +} + +/** + * Set a nested value in an object using dot-notation key + */ +function setNestedValue(obj: ConfigObject, key: string, value: string): void { + const parts = key.split('.') + let current: ConfigObject = obj + for (let i = 0; i < parts.length - 1; i++) { + const part = parts[i] + if (part == null) continue + const next = current[part] + if (typeof next !== 'object' || next === null || Array.isArray(next)) current[part] = {} + current = current[part] as ConfigObject + } + + const lastPart = parts.at(-1) + if (lastPart == null) return + current[lastPart] = value +} + +/** + * Get a nested value from an object using dot-notation key + */ +function getNestedValue(obj: ConfigObject, key: string): ConfigValue | undefined { + const parts = key.split('.') + let current: ConfigValue | undefined = obj + for (const part of parts) { + if (typeof current !== 'object' || current === null || Array.isArray(current)) return void 0 + current = current[part] + } + return current +} + +export class ConfigCommand implements Command { + readonly name = 'config' + + constructor( + private readonly options: readonly [key: string, value: string][] + ) { } + + async execute(ctx: CommandContext): Promise { + const {logger} = ctx + + if (this.options.length === 0) { + logger.error(buildUsageDiagnostic({ + code: 'CONFIG_COMMAND_ARGUMENTS_MISSING', + title: 'Config command requires at least one key=value pair', + rootCause: diagnosticLines( + 'tnmsc config was invoked without any configuration assignments.' + ), + exactFix: diagnosticLines( + 'Run `tnmsc config key=value` with at least one supported configuration key.' + ), + possibleFixes: [ + diagnosticLines(`Use one of the supported keys: ${VALID_CONFIG_KEYS.join(', ')}`) + ], + details: { + validKeys: [...VALID_CONFIG_KEYS] + } + })) + logger.info('Usage: tnmsc config key=value') + logger.info(`Valid keys: ${VALID_CONFIG_KEYS.join(', ')}`) + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: 'No options provided' + } + } + + let config: ConfigObject + + try { + config = readGlobalConfig() + } + catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: errorMessage + } + } + + const errors: string[] = [] + const updated: string[] = [] + + for (const [key, value] of this.options) { // Process each key-value pair + if (!isValidConfigKey(key)) { + errors.push(`Invalid key: ${key}`) + logger.error(buildUsageDiagnostic({ + code: 'CONFIG_COMMAND_KEY_INVALID', + title: `Unsupported config key: ${key}`, + rootCause: diagnosticLines( + `The config command received "${key}", which is not a supported configuration key.` + ), + exactFix: diagnosticLines('Use one of the supported config keys and rerun the command.'), + possibleFixes: [ + diagnosticLines(`Supported keys: ${VALID_CONFIG_KEYS.join(', ')}`) + ], + details: { + key, + validKeys: [...VALID_CONFIG_KEYS] + } + })) + continue + } + + if (key === 'logLevel' && !isValidLogLevel(value)) { // Special validation for logLevel + errors.push(`Invalid logLevel value: ${value}`) + logger.error(buildUsageDiagnostic({ + code: 'CONFIG_COMMAND_LOG_LEVEL_INVALID', + title: `Unsupported logLevel value: ${value}`, + rootCause: diagnosticLines( + `The config command received "${value}" for logLevel, but tnmsc does not support that level.` + ), + exactFix: diagnosticLines('Set logLevel to one of: trace, debug, info, warn, or error.'), + details: { + key, + value, + validLevels: ['trace', 'debug', 'info', 'warn', 'error'] + } + })) + continue + } + + const oldValue = getNestedValue(config, key) // Update config + setNestedValue(config, key, value) + + if (oldValue !== value) updated.push(`${key}=${value}`) + + logger.info('configuration updated', {key, value}) + } + + if (updated.length > 0) { // Write config if there are valid updates + try { + writeGlobalConfig(config) + } + catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: errorMessage + } + } + logger.info('global config written', {path: getGlobalConfigPath()}) + } + + const success = errors.length === 0 + const message = success + ? `Configuration updated: ${updated.join(', ')}` + : `Partial update: ${updated.join(', ')}. Errors: ${errors.join(', ')}` + + return { + success, + filesAffected: updated.length > 0 ? 1 : 0, + dirsAffected: 0, + message + } + } +} diff --git a/sdk/src/commands/ConfigShowCommand.ts b/sdk/src/commands/ConfigShowCommand.ts new file mode 100644 index 00000000..2a21822a --- /dev/null +++ b/sdk/src/commands/ConfigShowCommand.ts @@ -0,0 +1,48 @@ +import type {Command, CommandContext, CommandResult, ConfigSource, JsonConfigInfo} from './Command' +import process from 'node:process' +import {ConfigLoader} from '@/ConfigLoader' + +/** + * Command that outputs the current merged configuration and its source layers as JSON. + * + * Invoked via `tnmsc config --show --json`. + * Writes a `JsonConfigInfo` object to stdout containing: + * - `merged`: the final merged UserConfigFile + * - `sources`: an array of ConfigSource entries describing each layer + * + * When used without `--json`, logs the config info via the logger. + */ +export class ConfigShowCommand implements Command { + readonly name = 'config-show' + + async execute(ctx: CommandContext): Promise { + const {logger} = ctx + const loader = new ConfigLoader() + const mergedResult = loader.load() + + const sources: ConfigSource[] = mergedResult.sources.map(sourcePath => { + const loaded = loader.loadFromFile(sourcePath) + return { + path: sourcePath, + layer: 'global', + config: loaded.config + } + }) + + const configInfo: JsonConfigInfo = { + merged: mergedResult.config, + sources + } + + process.stdout.write(`${JSON.stringify(configInfo)}\n`) + + logger.info('config shown', {sources: mergedResult.sources.length}) + + return { + success: true, + filesAffected: 0, + dirsAffected: 0, + message: `Configuration displayed (${sources.length} source(s))` + } + } +} diff --git a/sdk/src/commands/DryRunCleanCommand.ts b/sdk/src/commands/DryRunCleanCommand.ts new file mode 100644 index 00000000..72ce58c5 --- /dev/null +++ b/sdk/src/commands/DryRunCleanCommand.ts @@ -0,0 +1,74 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import * as path from 'node:path' +import {collectAllPluginOutputs} from '../plugins/plugin-core' +import {logProtectedDeletionGuardError} from '../ProtectedDeletionGuard' +import {collectDeletionTargets} from './CleanupUtils' + +/** + * Dry-run clean command - simulates clean operations without actual deletion + */ +export class DryRunCleanCommand implements Command { + readonly name = 'dry-run-clean' + + async execute(ctx: CommandContext): Promise { + const {logger, outputPlugins, createCleanContext} = ctx + logger.info('running clean pipeline', {command: 'dry-run-clean', dryRun: true}) + + const cleanCtx = createCleanContext(true) + const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx) + + logger.info('collected outputs for cleanup', { + dryRun: true, + projectDirs: outputs.projectDirs.length, + projectFiles: outputs.projectFiles.length, + globalDirs: outputs.globalDirs.length, + globalFiles: outputs.globalFiles.length + }) + + const {filesToDelete, dirsToDelete, emptyDirsToDelete, violations, excludedScanGlobs} = await collectDeletionTargets(outputPlugins, cleanCtx) + const totalDirsToDelete = [...dirsToDelete, ...emptyDirsToDelete] + + if (violations.length > 0) { + logProtectedDeletionGuardError(logger, 'dry-run-cleanup', violations) + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: `Protected deletion guard blocked cleanup for ${violations.length} path(s)` + } + } + + this.logDryRunFiles(filesToDelete, logger) + this.logDryRunDirectories(totalDirsToDelete, logger) + + logger.info('clean complete', { + dryRun: true, + filesAffected: filesToDelete.length, + dirsAffected: totalDirsToDelete.length, + violations: 0, + excludedScanGlobs + }) + + return { + success: true, + filesAffected: filesToDelete.length, + dirsAffected: totalDirsToDelete.length, + message: 'Dry-run complete, no files were deleted' + } + } + + private logDryRunFiles(files: string[], logger: CommandContext['logger']): void { + for (const file of files) { + const resolved = path.isAbsolute(file) ? file : path.resolve(file) + logger.info('would delete file', {path: resolved, dryRun: true}) + } + } + + private logDryRunDirectories(dirs: string[], logger: CommandContext['logger']): void { + const sortedDirs = [...dirs].sort((a, b) => b.length - a.length) + for (const dir of sortedDirs) { + const resolved = path.isAbsolute(dir) ? dir : path.resolve(dir) + logger.info('would delete directory', {path: resolved, dryRun: true}) + } + } +} diff --git a/sdk/src/commands/DryRunOutputCommand.ts b/sdk/src/commands/DryRunOutputCommand.ts new file mode 100644 index 00000000..180501f6 --- /dev/null +++ b/sdk/src/commands/DryRunOutputCommand.ts @@ -0,0 +1,51 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import {syncWindowsConfigIntoWsl} from '@/wsl-mirror-sync' +import { + collectOutputDeclarations, + executeDeclarativeWriteOutputs +} from '../plugins/plugin-core' + +/** + * Dry-run output command - simulates write operations without actual I/O + */ +export class DryRunOutputCommand implements Command { + readonly name = 'dry-run-output' + + async execute(ctx: CommandContext): Promise { + const {logger, outputPlugins, createWriteContext} = ctx + logger.info('started', {command: 'dry-run-output', dryRun: true}) + + const writeCtx = createWriteContext(true) + const predeclaredOutputs = await collectOutputDeclarations(outputPlugins, writeCtx) + const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx, predeclaredOutputs) + + let totalFiles = 0 + let totalDirs = 0 + for (const [pluginName, result] of results) { + totalFiles += result.files.length + totalDirs += result.dirs.length + logger.info('plugin result', {plugin: pluginName, files: result.files.length, dirs: result.dirs.length, dryRun: true}) + } + + const wslMirrorResult = await syncWindowsConfigIntoWsl(outputPlugins, writeCtx, void 0, predeclaredOutputs) + if (wslMirrorResult.errors.length > 0) { + return { + success: false, + filesAffected: totalFiles, + dirsAffected: totalDirs, + message: wslMirrorResult.errors.join('\n') + } + } + + totalFiles += wslMirrorResult.mirroredFiles + + logger.info('complete', {command: 'dry-run-output', totalFiles, totalDirs, dryRun: true}) + + return { + success: true, + filesAffected: totalFiles, + dirsAffected: totalDirs, + message: 'Dry-run complete, no files were written' + } + } +} diff --git a/sdk/src/commands/ExecuteCommand.ts b/sdk/src/commands/ExecuteCommand.ts new file mode 100644 index 00000000..8f4c1c96 --- /dev/null +++ b/sdk/src/commands/ExecuteCommand.ts @@ -0,0 +1,79 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import {syncWindowsConfigIntoWsl} from '@/wsl-mirror-sync' +import { + collectOutputDeclarations, + executeDeclarativeWriteOutputs +} from '../plugins/plugin-core' +import {performCleanup} from './CleanupUtils' + +/** + * Execute command - performs actual write operations + * Includes pre-cleanup to remove stale files before writing new outputs + */ +export class ExecuteCommand implements Command { + readonly name = 'execute' + + async execute(ctx: CommandContext): Promise { + const {logger, outputPlugins, createCleanContext, createWriteContext} = ctx + logger.info('started', {command: 'execute'}) + + const writeCtx = createWriteContext(false) + const predeclaredOutputs = await collectOutputDeclarations(outputPlugins, writeCtx) + const cleanCtx = createCleanContext(false) // Step 1: Pre-cleanup (non-dry-run only) + const cleanupResult = await performCleanup(outputPlugins, cleanCtx, logger, predeclaredOutputs) + + if (cleanupResult.violations.length > 0 || cleanupResult.conflicts.length > 0) { + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + ...cleanupResult.message != null ? {message: cleanupResult.message} : {} + } + } + + logger.info('cleanup complete', {deletedFiles: cleanupResult.deletedFiles, deletedDirs: cleanupResult.deletedDirs}) + + const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx, predeclaredOutputs) // Step 2: Write outputs + + let totalFiles = 0 + let totalDirs = 0 + const writeErrors: string[] = [] + for (const result of results.values()) { + totalFiles += result.files.length + totalDirs += result.dirs.length + for (const fileResult of result.files) { + if (!fileResult.success) writeErrors.push(fileResult.error?.message ?? `Failed to write ${fileResult.path}`) + } + } + + if (writeErrors.length > 0) { + return { + success: false, + filesAffected: totalFiles, + dirsAffected: totalDirs, + message: writeErrors.join('\n') + } + } + + const wslMirrorResult = await syncWindowsConfigIntoWsl(outputPlugins, writeCtx, void 0, predeclaredOutputs) + + if (wslMirrorResult.errors.length > 0) { + return { + success: false, + filesAffected: totalFiles, + dirsAffected: totalDirs, + message: wslMirrorResult.errors.join('\n') + } + } + + totalFiles += wslMirrorResult.mirroredFiles + + logger.info('complete', {command: 'execute', pluginCount: results.size}) + + return { + success: true, + filesAffected: totalFiles, + dirsAffected: totalDirs + } + } +} diff --git a/sdk/src/commands/HelpCommand.ts b/sdk/src/commands/HelpCommand.ts new file mode 100644 index 00000000..ae7201d1 --- /dev/null +++ b/sdk/src/commands/HelpCommand.ts @@ -0,0 +1,77 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import {AINDEX_CONFIG_KEY_PATHS} from '@/plugins/plugin-core' +import {getCliVersion} from './VersionCommand' + +const CLI_NAME = 'tnmsc' +const CONFIG_KEY_LIST_TEXT = ['workspaceDir', 'logLevel', ...AINDEX_CONFIG_KEY_PATHS].join(',\n ') + +const HELP_TEXT = ` +${CLI_NAME} v${getCliVersion()} - Memory Sync CLI + +Synchronize AI memory and configuration files across projects. + +USAGE: + ${CLI_NAME} Run the sync pipeline (default) + ${CLI_NAME} help Show this help message + ${CLI_NAME} version Show version information + ${CLI_NAME} init Deprecated; no longer initializes aindex + ${CLI_NAME} dry-run Preview what would be written + ${CLI_NAME} clean Remove all generated files + ${CLI_NAME} clean --dry-run Preview what would be cleaned + ${CLI_NAME} config key=value Set configuration value + +SUBCOMMANDS: + help Show this help message + version Show version information + init Deprecated; keep public target-relative definitions manually + dry-run Preview changes without writing files + clean Remove all generated output files and directories + config Set configuration values in global config file (~/.aindex/.tnmsc.json) + +ALIASES: + ${CLI_NAME} --help, ${CLI_NAME} -h Same as '${CLI_NAME} help' + ${CLI_NAME} --version, ${CLI_NAME} -v Same as '${CLI_NAME} version' + ${CLI_NAME} clean -n Same as '${CLI_NAME} clean --dry-run' + ${CLI_NAME} config key=value Set config value in global config file + +LOG LEVEL OPTIONS: + --trace Most verbose output + --debug Debug information + --info Standard information (default) + --warn Warnings only + --error Errors only + +CLEAN OPTIONS: + -n, --dry-run Preview cleanup without removing files + +CONFIG OPTIONS: + key=value Set a configuration value in global config (~/.aindex/.tnmsc.json) + Valid keys: ${CONFIG_KEY_LIST_TEXT} + + Examples: + ${CLI_NAME} config workspaceDir=~/my-project + ${CLI_NAME} config aindex.skills.src=skills + ${CLI_NAME} config logLevel=debug + +CONFIGURATION: + Configure via plugin.config.ts in your project root. + See documentation for detailed configuration options. +`.trim() + +/** + * Help command - displays CLI usage information + */ +export class HelpCommand implements Command { + readonly name = 'help' + + async execute(ctx: CommandContext): Promise { + ctx.logger.info(HELP_TEXT) + + return { + success: true, + filesAffected: 0, + dirsAffected: 0, + message: 'Help displayed' + } + } +} diff --git a/sdk/src/commands/InitCommand.test.ts b/sdk/src/commands/InitCommand.test.ts new file mode 100644 index 00000000..3224c8f6 --- /dev/null +++ b/sdk/src/commands/InitCommand.test.ts @@ -0,0 +1,78 @@ +import type {CommandContext} from './Command' +import * as fs from 'node:fs' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger, FilePathKind} from '../plugins/plugin-core' +import {InitCommand} from './InitCommand' + +function createCommandContext(): CommandContext { + const workspaceDir = path.resolve('tmp-init-command') + const userConfigOptions = mergeConfig({workspaceDir}) + + return { + logger: createLogger('InitCommandTest', 'error'), + outputPlugins: [], + userConfigOptions, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + }, + createCleanContext: dryRun => ({ + logger: createLogger('InitCommandTest', 'error'), + fs, + path, + glob, + dryRun, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + } + }) as CommandContext['createCleanContext'] extends (dryRun: boolean) => infer T ? T : never, + createWriteContext: dryRun => ({ + logger: createLogger('InitCommandTest', 'error'), + fs, + path, + glob, + dryRun, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + } + }) as CommandContext['createWriteContext'] extends (dryRun: boolean) => infer T ? T : never + } +} + +describe('init command', () => { + it('returns a deprecation failure without creating files', async () => { + const result = await new InitCommand().execute(createCommandContext()) + + expect(result.success).toBe(false) + expect(result.filesAffected).toBe(0) + expect(result.dirsAffected).toBe(0) + expect(result.message).toContain('deprecated') + expect(result.message).toContain('~/workspace/aindex/public/') + }) +}) diff --git a/sdk/src/commands/InitCommand.ts b/sdk/src/commands/InitCommand.ts new file mode 100644 index 00000000..98180fcc --- /dev/null +++ b/sdk/src/commands/InitCommand.ts @@ -0,0 +1,36 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' + +const INIT_DEPRECATION_MESSAGE = '`tnmsc init` is deprecated and no longer initializes aindex. Maintain the public target-relative definitions manually under `~/workspace/aindex/public/`.' + +export class InitCommand implements Command { + readonly name = 'init' + + async execute(ctx: CommandContext): Promise { + const {logger} = ctx + + logger.warn(buildUsageDiagnostic({ + code: 'INIT_COMMAND_DEPRECATED', + title: 'The init command is deprecated', + rootCause: diagnosticLines( + '`tnmsc init` no longer initializes aindex content or project definitions.' + ), + exactFix: diagnosticLines( + 'Maintain the target-relative definitions manually under `~/workspace/aindex/public/`.' + ), + possibleFixes: [ + diagnosticLines('Run `tnmsc help` to find a supported replacement command for your workflow.') + ], + details: { + command: 'init' + } + })) + + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: INIT_DEPRECATION_MESSAGE + } + } +} diff --git a/sdk/src/commands/JsonOutputCommand.ts b/sdk/src/commands/JsonOutputCommand.ts new file mode 100644 index 00000000..3123e96c --- /dev/null +++ b/sdk/src/commands/JsonOutputCommand.ts @@ -0,0 +1,56 @@ +import type {Command, CommandContext, CommandResult, JsonCommandResult} from './Command' +import process from 'node:process' +import {partitionBufferedDiagnostics} from '@/diagnostics' +import {clearBufferedDiagnostics, drainBufferedDiagnostics} from '@/plugins/plugin-core' + +/** + * Decorator command that wraps any Command to produce JSON output on stdout. + * + * When the `--json` flag is detected, this wrapper: + * 1. Suppresses all Winston console logging (sets global log level to 'silent') + * 2. Delegates execution to the inner command + * 3. Converts the CommandResult to a JsonCommandResult + * 4. Writes the JSON string to stdout + * + * This ensures clean, parseable JSON output for consumption by + * Tauri sidecar or other external tooling. + */ +export class JsonOutputCommand implements Command { + readonly name: string + private readonly inner: Command + + constructor(inner: Command) { + this.inner = inner + this.name = `json:${inner.name}` + } + + async execute(ctx: CommandContext): Promise { + clearBufferedDiagnostics() + const result = await this.inner.execute(ctx) + const jsonResult = toJsonCommandResult(result, drainBufferedDiagnostics()) + process.stdout.write(`${JSON.stringify(jsonResult)}\n`) + return result + } +} + +/** + * Convert a CommandResult to a JsonCommandResult. + * Maps the base result fields and initialises optional arrays as empty + * when not present, ensuring a consistent JSON shape. + */ +export function toJsonCommandResult( + result: CommandResult, + diagnostics = drainBufferedDiagnostics() +): JsonCommandResult { + const {warnings, errors} = partitionBufferedDiagnostics(diagnostics) + const json: JsonCommandResult = { + success: result.success, + filesAffected: result.filesAffected, + dirsAffected: result.dirsAffected, + ...result.message != null && {message: result.message}, + pluginResults: [], + warnings, + errors + } + return json +} diff --git a/sdk/src/commands/PluginsCommand.ts b/sdk/src/commands/PluginsCommand.ts new file mode 100644 index 00000000..8f284a06 --- /dev/null +++ b/sdk/src/commands/PluginsCommand.ts @@ -0,0 +1,54 @@ +import type {Command, CommandContext, CommandResult, JsonPluginInfo} from './Command' +import process from 'node:process' + +/** + * Command that outputs all registered output plugin information as JSON. + * + * Invoked via `tnmsc plugins --json`. + * Writes a `JsonPluginInfo[]` array to stdout containing each output plugin's + * name, description, and dependency list. + * + * When used without `--json`, logs the plugin list via the logger. + */ +export class PluginsCommand implements Command { + readonly name = 'plugins' + + async execute(ctx: CommandContext): Promise { + const {logger, outputPlugins, userConfigOptions} = ctx + + const allPlugins = userConfigOptions.plugins + const pluginInfos: JsonPluginInfo[] = [] + + for (const plugin of allPlugins) { + pluginInfos.push({ + name: plugin.name, + kind: 'Output', + description: plugin.name, + dependencies: [...plugin.dependsOn ?? []] + }) + } + + const registeredNames = new Set(pluginInfos.map(p => p.name)) // (they are registered separately via registerOutputPlugins) // Also include output plugins that may not be in userConfigOptions.plugins + for (const plugin of outputPlugins) { + if (!registeredNames.has(plugin.name)) { + pluginInfos.push({ + name: plugin.name, + kind: 'Output', + description: plugin.name, + dependencies: [...plugin.dependsOn ?? []] + }) + } + } + + process.stdout.write(`${JSON.stringify(pluginInfos)}\n`) + + logger.info('plugins listed', {count: pluginInfos.length}) + + return { + success: true, + filesAffected: 0, + dirsAffected: 0, + message: `Listed ${pluginInfos.length} plugin(s)` + } + } +} diff --git a/sdk/src/commands/ProtectedDeletionCommands.test.ts b/sdk/src/commands/ProtectedDeletionCommands.test.ts new file mode 100644 index 00000000..3b431b65 --- /dev/null +++ b/sdk/src/commands/ProtectedDeletionCommands.test.ts @@ -0,0 +1,277 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin, OutputWriteContext} from '../plugins/plugin-core' +import type {CommandContext} from './Command' +import * as fs from 'node:fs' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it, vi} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger, FilePathKind, PluginKind} from '../plugins/plugin-core' +import {CleanCommand} from './CleanCommand' +import {DryRunCleanCommand} from './DryRunCleanCommand' +import {ExecuteCommand} from './ExecuteCommand' +import {JsonOutputCommand} from './JsonOutputCommand' + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createMockOutputPlugin( + cleanup?: OutputCleanupDeclarations, + convertContent?: OutputPlugin['convertContent'] +): OutputPlugin { + return { + type: PluginKind.Output, + name: 'MockOutputPlugin', + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return [{path: path.join(path.resolve('tmp-workspace-command'), 'project-a', 'AGENTS.md'), source: {}}] + }, + async declareCleanupPaths() { + return cleanup ?? {} + }, + async convertContent(declaration, ctx) { + if (convertContent != null) return convertContent(declaration, ctx) + return 'test' + } + } +} + +function createCommandContext( + outputPlugins: readonly OutputPlugin[], + workspaceDir: string = path.resolve('tmp-workspace-command') +): CommandContext { + const aindexDir = path.join(workspaceDir, 'aindex') + const userConfigOptions = mergeConfig({workspaceDir}) + const collectedOutputContext = { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [{ + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + } + }] + }, + aindexDir + } + + return { + logger: createMockLogger(), + outputPlugins, + collectedOutputContext, + userConfigOptions, + createCleanContext: (dryRun: boolean): OutputCleanContext => ({ + logger: createMockLogger(), + fs, + path, + glob, + collectedOutputContext, + pluginOptions: userConfigOptions, + dryRun + }), + createWriteContext: (dryRun: boolean): OutputWriteContext => ({ + logger: createMockLogger(), + fs, + path, + glob, + collectedOutputContext, + dryRun, + registeredPluginNames: outputPlugins.map(plugin => plugin.name) + }) + } +} + +describe('protected deletion commands', () => { + it('returns failure for clean and dry-run-clean when cleanup hits a protected path', async () => { + const workspaceDir = path.resolve('tmp-workspace-command') + const plugin = createMockOutputPlugin({ + delete: [{kind: 'directory', path: workspaceDir}] + }) + const ctx = createCommandContext([plugin]) + + await expect(new CleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ + success: false, + message: expect.stringContaining('Protected deletion guard blocked cleanup') + })) + await expect(new DryRunCleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ + success: false, + message: expect.stringContaining('Protected deletion guard blocked cleanup') + })) + }) + + it('returns failure before writes run when execute pre-cleanup hits a protected path', async () => { + const workspaceDir = path.resolve('tmp-workspace-command') + const convertContent = vi.fn(async () => 'should-not-write') + const plugin = createMockOutputPlugin({ + delete: [{kind: 'directory', path: workspaceDir}] + }, convertContent) + const ctx = createCommandContext([plugin]) + + await expect(new ExecuteCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ + success: false, + message: expect.stringContaining('Protected deletion guard blocked cleanup') + })) + expect(convertContent).not.toHaveBeenCalled() + }) + + it('returns failure when an output path conflicts with a cleanup protect declaration', async () => { + const outputPath = path.join(path.resolve('tmp-workspace-command'), 'project-a', 'AGENTS.md') + const plugin = createMockOutputPlugin({ + protect: [{kind: 'file', path: outputPath}] + }) + const ctx = createCommandContext([plugin]) + + await expect(new CleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ + success: false, + message: expect.stringContaining('Cleanup protection conflict') + })) + }) + + it('reuses declared outputs across cleanup and write during execute', async () => { + const workspaceDir = path.resolve('tmp-workspace-command-cached') + const outputPath = path.join(workspaceDir, 'project-a', 'AGENTS.md') + let declareOutputFilesCalls = 0 + const plugin: OutputPlugin = { + type: PluginKind.Output, + name: 'CachedOutputPlugin', + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + declareOutputFilesCalls += 1 + return [{path: outputPath, source: {}}] + }, + async declareCleanupPaths() { + return {} + }, + async convertContent() { + return 'cached-output' + } + } + + fs.rmSync(workspaceDir, {recursive: true, force: true}) + fs.mkdirSync(path.join(workspaceDir, 'project-a'), {recursive: true}) + + try { + const ctx = createCommandContext([plugin], workspaceDir) + const result = await new ExecuteCommand().execute(ctx) + + expect(result.success).toBe(true) + expect(declareOutputFilesCalls).toBe(1) + expect(fs.readFileSync(outputPath, 'utf8')).toBe('cached-output') + } + finally { + fs.rmSync(workspaceDir, {recursive: true, force: true}) + } + }) + + it('includes structured diagnostics in JSON output errors', async () => { + const writeSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + const command = new JsonOutputCommand({ + name: 'mock', + async execute(ctx) { + ctx.logger.error({ + code: 'MOCK_FAILURE', + title: 'Mock command failed', + rootCause: ['The mock command was forced to fail for JSON output testing.'], + exactFix: ['Update the mock command inputs so it no longer emits the test failure.'] + }) + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: 'blocked' + } + } + }) + + try { + await command.execute({ + ...createCommandContext([]), + logger: createLogger('ProtectedDeletionJsonTest', 'silent') + }) + expect(writeSpy).toHaveBeenCalledOnce() + const payload = JSON.parse(String(writeSpy.mock.calls[0]?.[0])) as { + readonly message?: string + readonly warnings: readonly unknown[] + readonly errors: readonly {code: string, title: string, rootCause: readonly string[], copyText: readonly string[]}[] + } + + expect(payload.message).toBe('blocked') + expect(payload.warnings).toEqual([]) + expect(payload.errors).toEqual([ + expect.objectContaining({ + code: 'MOCK_FAILURE', + title: 'Mock command failed', + rootCause: ['The mock command was forced to fail for JSON output testing.'], + copyText: expect.arrayContaining(['[MOCK_FAILURE] Mock command failed']) + }) + ]) + } + finally { + writeSpy.mockRestore() + } + }) + + it('includes workspace empty directories in clean dry-run results', async () => { + const workspaceDir = path.resolve('tmp-workspace-command-dry-run-empty') + const generatedDir = path.join(workspaceDir, 'generated') + const generatedFile = path.join(generatedDir, 'AGENTS.md') + const emptyLeafDir = path.join(workspaceDir, 'scratch', 'empty', 'leaf') + const retainedScratchFile = path.join(workspaceDir, 'scratch', 'keep.md') + const plugin: OutputPlugin = { + type: PluginKind.Output, + name: 'DryRunEmptyDirPlugin', + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return [{path: generatedFile, source: {}}] + }, + async declareCleanupPaths() { + return {} + }, + async convertContent() { + return '' + } + } + + fs.rmSync(workspaceDir, {recursive: true, force: true}) + fs.mkdirSync(generatedDir, {recursive: true}) + fs.mkdirSync(emptyLeafDir, {recursive: true}) + fs.mkdirSync(path.dirname(retainedScratchFile), {recursive: true}) + fs.writeFileSync(generatedFile, '# generated', 'utf8') + fs.writeFileSync(retainedScratchFile, '# keep', 'utf8') + + try { + const ctx = createCommandContext([plugin], workspaceDir) + const result = await new DryRunCleanCommand().execute(ctx) + + expect(result).toEqual(expect.objectContaining({ + success: true, + filesAffected: 1, + dirsAffected: 3 + })) + } + finally { + fs.rmSync(workspaceDir, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/commands/SetCommand.ts b/sdk/src/commands/SetCommand.ts new file mode 100644 index 00000000..e69de29b diff --git a/sdk/src/commands/UnknownCommand.ts b/sdk/src/commands/UnknownCommand.ts new file mode 100644 index 00000000..7a530f42 --- /dev/null +++ b/sdk/src/commands/UnknownCommand.ts @@ -0,0 +1,34 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' + +/** + * Unknown command - displays error for unrecognized subcommands + */ +export class UnknownCommand implements Command { + readonly name = 'unknown' + + constructor(private readonly unknownCmd: string) { } + + async execute(ctx: CommandContext): Promise { + ctx.logger.error(buildUsageDiagnostic({ + code: 'UNKNOWN_COMMAND', + title: `Unknown tnmsc command: ${this.unknownCmd}`, + rootCause: diagnosticLines(`tnmsc does not recognize the "${this.unknownCmd}" subcommand.`), + exactFix: diagnosticLines('Run `tnmsc help` and invoke one of the supported commands.'), + possibleFixes: [ + diagnosticLines('Check the command spelling and remove unsupported aliases or flags.') + ], + details: { + command: this.unknownCmd + } + })) + ctx.logger.info('run "tnmsc help" for available commands') + + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: `Unknown command: ${this.unknownCmd}` + } + } +} diff --git a/sdk/src/commands/VersionCommand.ts b/sdk/src/commands/VersionCommand.ts new file mode 100644 index 00000000..6f03525e --- /dev/null +++ b/sdk/src/commands/VersionCommand.ts @@ -0,0 +1,29 @@ +import type {Command, CommandContext, CommandResult} from './Command' + +const CLI_NAME = 'tnmsc' + +/** + * Get CLI version from build-time injected constant. + * Falls back to 'unknown' in development mode. + */ +export function getCliVersion(): string { + return typeof __CLI_VERSION__ !== 'undefined' ? __CLI_VERSION__ : 'dev' +} + +/** + * Version command - displays CLI version + */ +export class VersionCommand implements Command { + readonly name = 'version' + + async execute(ctx: CommandContext): Promise { + ctx.logger.info(`${CLI_NAME} v${getCliVersion()}`) + + return { + success: true, + filesAffected: 0, + dirsAffected: 0, + message: 'Version displayed' + } + } +} diff --git a/sdk/src/commands/bridge.rs b/sdk/src/commands/bridge.rs new file mode 100644 index 00000000..d3d18de0 --- /dev/null +++ b/sdk/src/commands/bridge.rs @@ -0,0 +1,23 @@ +use std::process::ExitCode; + +use crate::bridge::node::run_node_command; + +pub fn execute(json_mode: bool) -> ExitCode { + run_node_command("execute", json_mode, &[]) +} + +pub fn dry_run(json_mode: bool) -> ExitCode { + run_node_command("dry-run", json_mode, &[]) +} + +pub fn clean(json_mode: bool) -> ExitCode { + run_node_command("clean", json_mode, &[]) +} + +pub fn dry_run_clean(json_mode: bool) -> ExitCode { + run_node_command("clean", json_mode, &["--dry-run"]) +} + +pub fn plugins(json_mode: bool) -> ExitCode { + run_node_command("plugins", json_mode, &[]) +} diff --git a/sdk/src/commands/config_cmd.rs b/sdk/src/commands/config_cmd.rs new file mode 100644 index 00000000..e7eb62b5 --- /dev/null +++ b/sdk/src/commands/config_cmd.rs @@ -0,0 +1,108 @@ +use std::process::ExitCode; + +use crate::diagnostic_helpers::{diagnostic, line, optional_details}; +use serde_json::json; +use tnmsc_logger::create_logger; + +use crate::core::config::{ConfigLoader, get_required_global_config_path}; + +pub fn execute(pairs: &[(String, String)]) -> ExitCode { + let logger = create_logger("config", None); + let result = match ConfigLoader::with_defaults().try_load(std::path::Path::new(".")) { + Ok(result) => result, + Err(error) => { + logger.error(diagnostic( + "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", + "Failed to resolve the global config path", + line("The runtime could not determine which global config file should be updated."), + Some(line( + "Ensure the required global config exists and retry the command.", + )), + None, + optional_details(json!({ "error": error })), + )); + return ExitCode::FAILURE; + } + }; + let mut config = result.config; + + for (key, value) in pairs { + match key.as_str() { + "workspaceDir" => config.workspace_dir = Some(value.clone()), + "logLevel" => config.log_level = Some(value.clone()), + _ => { + logger.warn(diagnostic( + "CONFIG_KEY_UNKNOWN", + "Unknown config key was ignored", + line("The provided config key is not supported by this command."), + Some(line( + "Use one of the supported keys: `workspaceDir`, `logLevel`.", + )), + None, + optional_details(json!({ "key": key })), + )); + } + } + } + + let config_path = match get_required_global_config_path() { + Ok(path) => path, + Err(error) => { + logger.error(diagnostic( + "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", + "Failed to resolve the global config path", + line("The runtime could not determine which global config file should be written."), + Some(line( + "Ensure the required global config exists and retry the command.", + )), + None, + optional_details(json!({ "error": error })), + )); + return ExitCode::FAILURE; + } + }; + match serde_json::to_string_pretty(&config) { + Ok(json) => { + if let Some(parent) = config_path.parent() { + let _ = std::fs::create_dir_all(parent); + } + match std::fs::write(&config_path, &json) { + Ok(()) => { + logger.info( + serde_json::Value::String(format!( + "Config saved to {}", + config_path.display() + )), + None, + ); + ExitCode::SUCCESS + } + Err(e) => { + logger.error(diagnostic( + "CONFIG_WRITE_FAILED", + "Failed to write the global config file", + line("The CLI generated the config JSON but could not write it to disk."), + Some(line("Check that the config path is writable and retry.")), + None, + optional_details(json!({ + "path": config_path.to_string_lossy(), + "error": e.to_string() + })), + )); + ExitCode::FAILURE + } + } + } + Err(e) => { + logger.error(diagnostic( + "CONFIG_SERIALIZATION_FAILED", + "Failed to serialize the config", + line("The config object could not be converted to JSON."), + None, + None, + optional_details(json!({ "error": e.to_string() })), + )); + ExitCode::FAILURE + } + } +} diff --git a/sdk/src/commands/config_show.rs b/sdk/src/commands/config_show.rs new file mode 100644 index 00000000..0c9be861 --- /dev/null +++ b/sdk/src/commands/config_show.rs @@ -0,0 +1,44 @@ +use std::process::ExitCode; + +use crate::diagnostic_helpers::{diagnostic, line, optional_details}; +use serde_json::json; +use tnmsc_logger::create_logger; + +use crate::core::config::ConfigLoader; + +pub fn execute() -> ExitCode { + let logger = create_logger("config-show", None); + let result = match ConfigLoader::with_defaults().try_load(std::path::Path::new(".")) { + Ok(result) => result, + Err(error) => { + logger.error(diagnostic( + "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", + "Failed to resolve the global config path", + line("The runtime could not determine which global config file should be shown."), + Some(line( + "Ensure the required global config exists and retry the command.", + )), + None, + optional_details(json!({ "error": error })), + )); + return ExitCode::FAILURE; + } + }; + match serde_json::to_string_pretty(&result.config) { + Ok(json) => { + println!("{json}"); + ExitCode::SUCCESS + } + Err(e) => { + logger.error(diagnostic( + "CONFIG_SERIALIZATION_FAILED", + "Failed to serialize the config", + line("The merged config could not be converted to JSON for display."), + None, + None, + optional_details(json!({ "error": e.to_string() })), + )); + ExitCode::FAILURE + } + } +} diff --git a/sdk/src/commands/factories/CleanCommandFactory.ts b/sdk/src/commands/factories/CleanCommandFactory.ts new file mode 100644 index 00000000..017d1025 --- /dev/null +++ b/sdk/src/commands/factories/CleanCommandFactory.ts @@ -0,0 +1,20 @@ +import type {Command} from '../Command' +import type {CommandFactory} from '../CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {CleanCommand} from '../CleanCommand' +import {DryRunCleanCommand} from '../DryRunCleanCommand' + +/** + * Factory for creating CleanCommand or DryRunCleanCommand + * Handles 'clean' subcommand with optional --dry-run flag + */ +export class CleanCommandFactory implements CommandFactory { + canHandle(args: ParsedCliArgs): boolean { + return args.subcommand === 'clean' + } + + createCommand(args: ParsedCliArgs): Command { + if (args.dryRun) return new DryRunCleanCommand() + return new CleanCommand() + } +} diff --git a/sdk/src/commands/factories/ConfigCommandFactory.ts b/sdk/src/commands/factories/ConfigCommandFactory.ts new file mode 100644 index 00000000..bc7b6fe0 --- /dev/null +++ b/sdk/src/commands/factories/ConfigCommandFactory.ts @@ -0,0 +1,29 @@ +import type {Command} from '../Command' +import type {CommandFactory} from '../CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {ConfigCommand} from '../ConfigCommand' +import {ConfigShowCommand} from '../ConfigShowCommand' + +/** + * Factory for creating ConfigCommand or ConfigShowCommand + * Handles 'config' subcommand with --show flag or key=value arguments + */ +export class ConfigCommandFactory implements CommandFactory { + canHandle(args: ParsedCliArgs): boolean { + return args.subcommand === 'config' + } + + createCommand(args: ParsedCliArgs): Command { + if (args.showFlag) { // Config --show subcommand + return new ConfigShowCommand() + } + + const parsedPositional: [key: string, value: string][] = [] // Parse positional arguments as key=value pairs + for (const arg of args.positional) { + const eqIndex = arg.indexOf('=') + if (eqIndex > 0) parsedPositional.push([arg.slice(0, eqIndex), arg.slice(eqIndex + 1)]) + } + + return new ConfigCommand([...args.setOption, ...parsedPositional]) + } +} diff --git a/sdk/src/commands/factories/DryRunCommandFactory.ts b/sdk/src/commands/factories/DryRunCommandFactory.ts new file mode 100644 index 00000000..232901ea --- /dev/null +++ b/sdk/src/commands/factories/DryRunCommandFactory.ts @@ -0,0 +1,19 @@ +import type {Command} from '../Command' +import type {CommandFactory} from '../CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {DryRunOutputCommand} from '../DryRunOutputCommand' + +/** + * Factory for creating DryRunOutputCommand + * Handles 'dry-run' subcommand + */ +export class DryRunCommandFactory implements CommandFactory { + canHandle(args: ParsedCliArgs): boolean { + return args.subcommand === 'dry-run' + } + + createCommand(args: ParsedCliArgs): Command { + void args + return new DryRunOutputCommand() + } +} diff --git a/sdk/src/commands/factories/ExecuteCommandFactory.ts b/sdk/src/commands/factories/ExecuteCommandFactory.ts new file mode 100644 index 00000000..d7a6f8dc --- /dev/null +++ b/sdk/src/commands/factories/ExecuteCommandFactory.ts @@ -0,0 +1,20 @@ +import type {Command} from '../Command' +import type {CommandFactory} from '../CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {ExecuteCommand} from '../ExecuteCommand' + +/** + * Factory for creating ExecuteCommand (default command) + * Handles default execution when no specific subcommand matches + */ +export class ExecuteCommandFactory implements CommandFactory { + canHandle(args: ParsedCliArgs): boolean { // This is a catch-all factory with lowest priority + void args + return true + } + + createCommand(args: ParsedCliArgs): Command { + void args + return new ExecuteCommand() + } +} diff --git a/sdk/src/commands/factories/HelpCommandFactory.ts b/sdk/src/commands/factories/HelpCommandFactory.ts new file mode 100644 index 00000000..3b4174a5 --- /dev/null +++ b/sdk/src/commands/factories/HelpCommandFactory.ts @@ -0,0 +1,22 @@ +import type {Command} from '../Command' +import type {PrioritizedCommandFactory} from '../CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {FactoryPriority} from '../CommandFactory' +import {HelpCommand} from '../HelpCommand' + +/** + * Factory for creating HelpCommand + * Handles --help flag and 'help' subcommand + */ +export class HelpCommandFactory implements PrioritizedCommandFactory { + readonly priority = FactoryPriority.Flags + + canHandle(args: ParsedCliArgs): boolean { + return args.helpFlag || args.subcommand === 'help' + } + + createCommand(args: ParsedCliArgs): Command { + void args + return new HelpCommand() + } +} diff --git a/sdk/src/commands/factories/InitCommandFactory.ts b/sdk/src/commands/factories/InitCommandFactory.ts new file mode 100644 index 00000000..71f55fca --- /dev/null +++ b/sdk/src/commands/factories/InitCommandFactory.ts @@ -0,0 +1,15 @@ +import type {Command} from '../Command' +import type {CommandFactory} from '../CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {InitCommand} from '../InitCommand' + +export class InitCommandFactory implements CommandFactory { + canHandle(args: ParsedCliArgs): boolean { + return args.subcommand === 'init' + } + + createCommand(args: ParsedCliArgs): Command { + void args + return new InitCommand() + } +} diff --git a/sdk/src/commands/factories/PluginsCommandFactory.ts b/sdk/src/commands/factories/PluginsCommandFactory.ts new file mode 100644 index 00000000..11b25ecb --- /dev/null +++ b/sdk/src/commands/factories/PluginsCommandFactory.ts @@ -0,0 +1,19 @@ +import type {Command} from '../Command' +import type {CommandFactory} from '../CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {PluginsCommand} from '../PluginsCommand' + +/** + * Factory for creating PluginsCommand + * Handles 'plugins' subcommand + */ +export class PluginsCommandFactory implements CommandFactory { + canHandle(args: ParsedCliArgs): boolean { + return args.subcommand === 'plugins' + } + + createCommand(args: ParsedCliArgs): Command { + void args + return new PluginsCommand() + } +} diff --git a/sdk/src/commands/factories/UnknownCommandFactory.ts b/sdk/src/commands/factories/UnknownCommandFactory.ts new file mode 100644 index 00000000..6c97fb62 --- /dev/null +++ b/sdk/src/commands/factories/UnknownCommandFactory.ts @@ -0,0 +1,22 @@ +import type {Command} from '../Command' +import type {PrioritizedCommandFactory} from '../CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {FactoryPriority} from '../CommandFactory' +import {UnknownCommand} from '../UnknownCommand' + +/** + * Factory for creating UnknownCommand + * Handles unknown/invalid subcommands + */ +export class UnknownCommandFactory implements PrioritizedCommandFactory { + readonly priority = FactoryPriority.Unknown + + canHandle(args: ParsedCliArgs): boolean { + return args.unknownCommand != null + } + + createCommand(args: ParsedCliArgs): Command { + if (args.unknownCommand == null) return new UnknownCommand('') + return new UnknownCommand(args.unknownCommand) + } +} diff --git a/sdk/src/commands/factories/VersionCommandFactory.ts b/sdk/src/commands/factories/VersionCommandFactory.ts new file mode 100644 index 00000000..95dbc123 --- /dev/null +++ b/sdk/src/commands/factories/VersionCommandFactory.ts @@ -0,0 +1,22 @@ +import type {Command} from '../Command' +import type {PrioritizedCommandFactory} from '../CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {FactoryPriority} from '../CommandFactory' +import {VersionCommand} from '../VersionCommand' + +/** + * Factory for creating VersionCommand + * Handles --version flag and 'version' subcommand + */ +export class VersionCommandFactory implements PrioritizedCommandFactory { + readonly priority = FactoryPriority.Flags + + canHandle(args: ParsedCliArgs): boolean { + return args.versionFlag || args.subcommand === 'version' + } + + createCommand(args: ParsedCliArgs): Command { + void args + return new VersionCommand() + } +} diff --git a/sdk/src/commands/help.rs b/sdk/src/commands/help.rs new file mode 100644 index 00000000..94b02bd9 --- /dev/null +++ b/sdk/src/commands/help.rs @@ -0,0 +1,26 @@ +use std::process::ExitCode; + +pub fn execute() -> ExitCode { + println!("tnmsc — Memory Sync CLI"); + println!(); + println!("USAGE:"); + println!(" tnmsc [OPTIONS] [COMMAND]"); + println!(); + println!("COMMANDS:"); + println!(" (default) Sync AI memory and configuration files"); + println!(" dry-run Preview changes without writing files"); + println!(" clean Remove all generated output files"); + println!(" config Set or show configuration values"); + println!(" plugins List all registered plugins"); + println!(" version Show version information"); + println!(" help Show this help message"); + println!(); + println!("OPTIONS:"); + println!(" -j, --json Output results as JSON"); + println!(" --trace Set log level to trace"); + println!(" --debug Set log level to debug"); + println!(" --info Set log level to info"); + println!(" --warn Set log level to warn"); + println!(" --error Set log level to error"); + ExitCode::SUCCESS +} diff --git a/sdk/src/commands/mod.rs b/sdk/src/commands/mod.rs new file mode 100644 index 00000000..cad337be --- /dev/null +++ b/sdk/src/commands/mod.rs @@ -0,0 +1,5 @@ +pub mod bridge; +pub mod config_cmd; +pub mod config_show; +pub mod help; +pub mod version; diff --git a/sdk/src/commands/version.rs b/sdk/src/commands/version.rs new file mode 100644 index 00000000..8321606a --- /dev/null +++ b/sdk/src/commands/version.rs @@ -0,0 +1,6 @@ +use std::process::ExitCode; + +pub fn execute() -> ExitCode { + println!("{}", env!("CARGO_PKG_VERSION")); + ExitCode::SUCCESS +} diff --git a/sdk/src/config.outputScopes.test.ts b/sdk/src/config.outputScopes.test.ts new file mode 100644 index 00000000..a5b9e7ae --- /dev/null +++ b/sdk/src/config.outputScopes.test.ts @@ -0,0 +1,45 @@ +import {describe, expect, it} from 'vitest' +import {mergeConfig} from './config' + +describe('mergeConfig outputScopes', () => { + it('merges plugin topic overrides deeply', () => { + const merged = mergeConfig( + { + outputScopes: { + plugins: { + CursorOutputPlugin: { + commands: 'global', + skills: ['workspace', 'global'] + } + } + } + }, + { + outputScopes: { + plugins: { + CursorOutputPlugin: { + rules: 'project', + skills: 'project' + }, + OpencodeCLIOutputPlugin: { + mcp: 'global' + } + } + } + } + ) + + expect(merged.outputScopes).toEqual({ + plugins: { + CursorOutputPlugin: { + commands: 'global', + skills: 'project', + rules: 'project' + }, + OpencodeCLIOutputPlugin: { + mcp: 'global' + } + } + }) + }) +}) diff --git a/sdk/src/config.plugins-fast-path.test.ts b/sdk/src/config.plugins-fast-path.test.ts new file mode 100644 index 00000000..6dc21219 --- /dev/null +++ b/sdk/src/config.plugins-fast-path.test.ts @@ -0,0 +1,50 @@ +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {afterEach, describe, expect, it, vi} from 'vitest' + +import {defineConfig} from './config' + +const {collectInputContextMock} = vi.hoisted(() => ({ + collectInputContextMock: vi.fn(async () => { + throw new Error('collectInputContext should not run for plugins fast path') + }) +})) + +vi.mock('./inputs/runtime', async importOriginal => { + const actual = await importOriginal() + + return { + ...actual, + collectInputContext: collectInputContextMock + } +}) + +afterEach(() => { + vi.clearAllMocks() +}) + +describe('defineConfig plugins fast path', () => { + it('skips input collection for plugins runtime commands', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-plugins-fast-path-')) + + try { + const result = await defineConfig({ + loadUserConfig: false, + pipelineArgs: ['node', 'tnmsc', 'plugins', '--json'], + pluginOptions: { + workspaceDir: tempWorkspace, + plugins: [] + } + }) + + expect(collectInputContextMock).not.toHaveBeenCalled() + expect(result.context.workspace.directory.path).toBe(tempWorkspace) + expect(result.context.aindexDir).toBe(path.join(tempWorkspace, 'aindex')) + expect(result.outputPlugins).toEqual([]) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/config.test.ts b/sdk/src/config.test.ts new file mode 100644 index 00000000..2b94ef42 --- /dev/null +++ b/sdk/src/config.test.ts @@ -0,0 +1,173 @@ +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {afterEach, describe, expect, it, vi} from 'vitest' +import {defineConfig} from './config' +import {WorkspaceInputCapability} from './inputs/input-workspace' + +describe('defineConfig', () => { + const originalHome = process.env.HOME + const originalUserProfile = process.env.USERPROFILE + const originalHomeDrive = process.env.HOMEDRIVE + const originalHomePath = process.env.HOMEPATH + + afterEach(() => { + process.env.HOME = originalHome + process.env.USERPROFILE = originalUserProfile + process.env.HOMEDRIVE = originalHomeDrive + process.env.HOMEPATH = originalHomePath + vi.restoreAllMocks() + }) + + it('loads config only from ~/.aindex/.tnmsc.json', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-workspace-')) + const tempHome = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-home-')) + const globalConfigDir = path.join(tempHome, '.aindex') + const globalConfigPath = path.join(globalConfigDir, '.tnmsc.json') + const localConfigPath = path.join(tempWorkspace, '.tnmsc.json') + + process.env.HOME = tempHome + process.env.USERPROFILE = tempHome + delete process.env.HOMEDRIVE + delete process.env.HOMEPATH + + fs.mkdirSync(globalConfigDir, {recursive: true}) + fs.writeFileSync(globalConfigPath, JSON.stringify({ + workspaceDir: tempWorkspace, + aindex: { + dir: 'aindex', + skills: {src: 'skills', dist: 'dist/skills'}, + commands: {src: 'commands', dist: 'dist/commands'}, + subAgents: {src: 'subagents', dist: 'dist/subagents'}, + rules: {src: 'rules', dist: 'dist/rules'}, + globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, + workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, + app: {src: 'app', dist: 'dist/app'}, + ext: {src: 'ext', dist: 'dist/ext'}, + arch: {src: 'arch', dist: 'dist/arch'}, + softwares: {src: 'softwares', dist: 'dist/softwares'} + }, + logLevel: 'info' + }), 'utf8') + fs.writeFileSync(localConfigPath, JSON.stringify({workspaceDir: '/wrong/workspace', logLevel: 'error'}), 'utf8') + + try { + const result = await defineConfig({cwd: tempWorkspace}) + + expect(result.userConfigOptions.workspaceDir).toBe(tempWorkspace) + expect(result.userConfigOptions.aindex.softwares).toEqual({src: 'softwares', dist: 'dist/softwares'}) + expect(result.context.workspace.directory.path).toBe(tempWorkspace) + expect(result.context.aindexDir).toBe(path.join(tempWorkspace, 'aindex')) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + fs.rmSync(tempHome, {recursive: true, force: true}) + } + }) + + it('passes pipeline args into public proxy resolution', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-public-proxy-command-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const publicDir = path.join(aindexDir, 'public') + + fs.mkdirSync(path.join(publicDir, 'execute'), {recursive: true}) + fs.mkdirSync(path.join(publicDir, 'dry-run'), {recursive: true}) + fs.writeFileSync(path.join(publicDir, 'proxy.ts'), [ + 'export default (_logicalPath, ctx) => ctx.command === "dry-run"', + ' ? "dry-run/gitignore"', + ' : "execute/gitignore"', + '' + ].join('\n'), 'utf8') + fs.writeFileSync(path.join(publicDir, 'execute', 'gitignore'), 'execute\n', 'utf8') + fs.writeFileSync(path.join(publicDir, 'dry-run', 'gitignore'), 'dry-run\n', 'utf8') + + try { + const result = await defineConfig({ + loadUserConfig: false, + pipelineArgs: ['node', 'tnmsc', 'dry-run'], + pluginOptions: { + workspaceDir: tempWorkspace + } + }) + + expect(result.context.globalGitIgnore).toBe('dry-run\n') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('does not run builtin mutating input effects when plugins is explicitly empty', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-explicit-empty-plugins-')) + const orphanSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'orphan-skill') + const orphanSkillFile = path.join(orphanSkillDir, 'SKILL.md') + + fs.mkdirSync(orphanSkillDir, {recursive: true}) + fs.writeFileSync(orphanSkillFile, 'orphan\n', 'utf8') + + try { + const result = await defineConfig({ + loadUserConfig: false, + pluginOptions: { + workspaceDir: tempWorkspace, + plugins: [] + } + }) + + expect(result.context.workspace.directory.path).toBe(tempWorkspace) + expect(fs.existsSync(orphanSkillFile)).toBe(true) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('does not run builtin mutating input effects when shorthand plugins is explicitly empty', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-shorthand-empty-plugins-')) + const tempHome = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-shorthand-empty-home-')) + const orphanSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'orphan-skill') + const orphanSkillFile = path.join(orphanSkillDir, 'SKILL.md') + + process.env.HOME = tempHome + process.env.USERPROFILE = tempHome + delete process.env.HOMEDRIVE + delete process.env.HOMEPATH + + fs.mkdirSync(orphanSkillDir, {recursive: true}) + fs.writeFileSync(orphanSkillFile, 'orphan\n', 'utf8') + + try { + const result = await defineConfig({ + workspaceDir: tempWorkspace, + plugins: [] + }) + + expect(result.context.workspace.directory.path).toBe(tempWorkspace) + expect(fs.existsSync(orphanSkillFile)).toBe(true) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + fs.rmSync(tempHome, {recursive: true, force: true}) + } + }) + + it('accepts legacy input capabilities in pluginOptions.plugins without crashing', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-legacy-input-capabilities-')) + + try { + const result = await defineConfig({ + loadUserConfig: false, + pluginOptions: { + workspaceDir: tempWorkspace, + plugins: [new WorkspaceInputCapability()] + } + }) + + expect(result.context.workspace.directory.path).toBe(tempWorkspace) + expect(result.outputPlugins).toEqual([]) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/config.ts b/sdk/src/config.ts new file mode 100644 index 00000000..eff2e696 --- /dev/null +++ b/sdk/src/config.ts @@ -0,0 +1,475 @@ +import type { + AindexConfig, + CleanupProtectionOptions, + CommandSeriesOptions, + CommandSeriesPluginOverride, + ConfigLoaderOptions, + InputCapability, + InputCollectedContext, + OutputCollectedContext, + OutputPlugin, + OutputScopeOptions, + PluginOptions, + PluginOutputScopeTopics, + UserConfigFile, + WindowsOptions +} from './plugins/plugin-core' +import * as path from 'node:path' +import {createLogger} from '@truenine/logger' +import {checkVersionControl} from './Aindex' +import {getConfigLoader} from './ConfigLoader' +import {collectInputContext, resolveRuntimeCommand} from './inputs/runtime' +import { + buildDefaultAindexConfig, + FilePathKind, + mergeAindexConfig, + PathPlaceholders, + toOutputCollectedContext, + validateOutputScopeOverridesForPlugins +} from './plugins/plugin-core' +import {resolveUserPath} from './runtime-environment' + +/** + * Pipeline configuration containing collected context and output plugins + */ +export interface PipelineConfig { + readonly context: OutputCollectedContext + readonly outputPlugins: readonly OutputPlugin[] + readonly userConfigOptions: Required +} + +interface ResolvedPluginSetup { + readonly mergedOptions: Required + readonly outputPlugins: readonly OutputPlugin[] + readonly inputCapabilities: readonly InputCapability[] + readonly userConfigFile?: UserConfigFile +} + +function isOutputPlugin(plugin: InputCapability | OutputPlugin): plugin is OutputPlugin { + return 'declarativeOutput' in plugin +} + +function isInputCapability(plugin: InputCapability | OutputPlugin): plugin is InputCapability { + return 'collect' in plugin && !isOutputPlugin(plugin) +} + +const DEFAULT_AINDEX: Required = buildDefaultAindexConfig() + +const DEFAULT_OPTIONS: Required = { + version: '0.0.0', + workspaceDir: '~/project', + logLevel: 'info', + aindex: DEFAULT_AINDEX, + commandSeriesOptions: {}, + outputScopes: {}, + frontMatter: { + blankLineAfter: true + }, + cleanupProtection: {}, + windows: {}, + plugins: [] +} + +/** + * Convert UserConfigFile to PluginOptions + * UserConfigFile is the JSON schema, PluginOptions includes plugins + */ +export function userConfigToPluginOptions(userConfig: UserConfigFile): Partial { + return { + ...userConfig.version != null ? {version: userConfig.version} : {}, + ...userConfig.workspaceDir != null ? {workspaceDir: userConfig.workspaceDir} : {}, + ...userConfig.aindex != null ? {aindex: userConfig.aindex} : {}, + ...userConfig.commandSeriesOptions != null ? {commandSeriesOptions: userConfig.commandSeriesOptions} : {}, + ...userConfig.outputScopes != null ? {outputScopes: userConfig.outputScopes} : {}, + ...userConfig.frontMatter != null ? {frontMatter: userConfig.frontMatter} : {}, + ...userConfig.cleanupProtection != null ? {cleanupProtection: userConfig.cleanupProtection} : {}, + ...userConfig.windows != null ? {windows: userConfig.windows} : {}, + ...userConfig.logLevel != null ? {logLevel: userConfig.logLevel} : {} + } +} + +/** + * Options for defineConfig + */ +export interface DefineConfigOptions { + readonly pluginOptions?: PluginOptions + + readonly configLoaderOptions?: ConfigLoaderOptions + + readonly loadUserConfig?: boolean + + readonly cwd?: string + + readonly pipelineArgs?: readonly string[] +} + +/** + * Merge multiple PluginOptions with default configuration. + * Later options override earlier ones. + * Similar to vite/vitest mergeConfig. + */ +export function mergeConfig( + ...configs: Partial[] +): Required { + const initialConfig: Required = {...DEFAULT_OPTIONS} + return configs.reduce( + (acc: Required, config) => mergeTwoConfigs(acc, config), + initialConfig + ) +} + +function mergeTwoConfigs( + base: Required, + override: Partial +): Required { + const overridePlugins = override.plugins + const overrideCommandSeries = override.commandSeriesOptions + const overrideOutputScopes = override.outputScopes + const overrideFrontMatter = override.frontMatter + const overrideCleanupProtection = override.cleanupProtection + const overrideWindows = override.windows + + return { + ...base, + ...override, + aindex: mergeAindexConfig(base.aindex, override.aindex), + plugins: [ // Array concatenation for plugins + ...base.plugins, + ...overridePlugins ?? [] + ], + commandSeriesOptions: mergeCommandSeriesOptions(base.commandSeriesOptions, overrideCommandSeries), // Deep merge for commandSeriesOptions + outputScopes: mergeOutputScopeOptions(base.outputScopes, overrideOutputScopes), + frontMatter: mergeFrontMatterOptions(base.frontMatter, overrideFrontMatter), + cleanupProtection: mergeCleanupProtectionOptions(base.cleanupProtection, overrideCleanupProtection), + windows: mergeWindowsOptions(base.windows, overrideWindows) + } +} + +function mergeCommandSeriesOptions( + base?: CommandSeriesOptions, + override?: CommandSeriesOptions +): CommandSeriesOptions { + if (override == null) return base ?? {} + if (base == null) return override + + const mergedPluginOverrides: Record = {} // Merge pluginOverrides deeply + + if (base.pluginOverrides != null) { // Copy base plugin overrides + for (const [key, value] of Object.entries(base.pluginOverrides)) mergedPluginOverrides[key] = {...value} + } + + if (override.pluginOverrides != null) { // Merge override plugin overrides + for (const [key, value] of Object.entries(override.pluginOverrides)) { + mergedPluginOverrides[key] = { + ...mergedPluginOverrides[key], + ...value + } + } + } + + const includeSeriesPrefix = override.includeSeriesPrefix ?? base.includeSeriesPrefix // Build result with conditional properties to satisfy exactOptionalPropertyTypes + const hasPluginOverrides = Object.keys(mergedPluginOverrides).length > 0 + + if (includeSeriesPrefix != null && hasPluginOverrides) return {includeSeriesPrefix, pluginOverrides: mergedPluginOverrides} + if (includeSeriesPrefix != null) return {includeSeriesPrefix} + if (hasPluginOverrides) return {pluginOverrides: mergedPluginOverrides} + return {} +} + +function mergeOutputScopeTopics( + base?: PluginOutputScopeTopics, + override?: PluginOutputScopeTopics +): PluginOutputScopeTopics | undefined { + if (base == null && override == null) return void 0 + if (base == null) return override + if (override == null) return base + return {...base, ...override} +} + +function mergeOutputScopeOptions( + base?: OutputScopeOptions, + override?: OutputScopeOptions +): OutputScopeOptions { + if (override == null) return base ?? {} + if (base == null) return override + + const mergedPlugins: Record = {} + if (base.plugins != null) { + for (const [pluginName, topics] of Object.entries(base.plugins)) { + if (topics != null) mergedPlugins[pluginName] = {...topics} + } + } + if (override.plugins != null) { + for (const [pluginName, topics] of Object.entries(override.plugins)) { + const mergedTopics = mergeOutputScopeTopics(mergedPlugins[pluginName], topics) + if (mergedTopics != null) mergedPlugins[pluginName] = mergedTopics + } + } + + if (Object.keys(mergedPlugins).length === 0) return {} + return {plugins: mergedPlugins} +} + +function mergeFrontMatterOptions( + base: Required['frontMatter'], + override?: PluginOptions['frontMatter'] +): Required['frontMatter'] { + if (override == null) return base + return { + ...base, + ...override + } +} + +function mergeCleanupProtectionOptions( + base?: CleanupProtectionOptions, + override?: CleanupProtectionOptions +): CleanupProtectionOptions { + if (override == null) return base ?? {} + if (base == null) return override + + return { + rules: [ + ...base.rules ?? [], + ...override.rules ?? [] + ] + } +} + +function mergeWindowsOptions( + base?: WindowsOptions, + override?: WindowsOptions +): WindowsOptions { + if (override == null) return base ?? {} + if (base == null) return override + + const baseWsl2 = base.wsl2 + const overrideWsl2 = override.wsl2 + + return { + ...base, + ...override, + ...baseWsl2 != null || overrideWsl2 != null + ? { + wsl2: { + ...baseWsl2, + ...overrideWsl2 + } + } + : {} + } +} + +/** + * Check if options is DefineConfigOptions + */ +function isDefineConfigOptions(options: PluginOptions | DefineConfigOptions): options is DefineConfigOptions { + return 'pluginOptions' in options + || 'configLoaderOptions' in options + || 'loadUserConfig' in options + || 'cwd' in options + || 'pipelineArgs' in options +} + +function getProgrammaticPluginDeclaration( + options: PluginOptions | DefineConfigOptions +): { + readonly hasExplicitProgrammaticPlugins: boolean + readonly explicitProgrammaticPlugins?: PluginOptions['plugins'] +} { + if (isDefineConfigOptions(options)) { + return { + hasExplicitProgrammaticPlugins: Object.hasOwn(options.pluginOptions ?? {}, 'plugins'), + explicitProgrammaticPlugins: options.pluginOptions?.plugins + } + } + + return { + hasExplicitProgrammaticPlugins: Object.hasOwn(options, 'plugins'), + explicitProgrammaticPlugins: options.plugins + } +} + +function resolvePathForMinimalContext(rawPath: string, workspaceDir: string): string { + let resolvedPath = rawPath + + if (resolvedPath.includes(PathPlaceholders.WORKSPACE)) { + resolvedPath = resolvedPath.replace(PathPlaceholders.WORKSPACE, workspaceDir) + } + + return path.normalize(resolveUserPath(resolvedPath)) +} + +function createMinimalOutputCollectedContext( + options: Required +): OutputCollectedContext { + const workspaceDir = resolvePathForMinimalContext(options.workspaceDir, '') + const aindexDir = path.join(workspaceDir, options.aindex.dir) + + return toOutputCollectedContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir) + }, + projects: [] + }, + aindexDir + }) +} + +function shouldUsePluginsFastPath(pipelineArgs?: readonly string[]): boolean { + return resolveRuntimeCommand(pipelineArgs) === 'plugins' +} + +async function resolvePluginSetup( + options: PluginOptions | DefineConfigOptions = {} +): Promise< + ResolvedPluginSetup & { + readonly pipelineArgs?: readonly string[] + readonly userConfigFound: boolean + readonly userConfigSources: readonly string[] + } +> { + let shouldLoadUserConfig: boolean, + cwd: string | undefined, + pluginOptions: PluginOptions, + configLoaderOptions: ConfigLoaderOptions | undefined, + pipelineArgs: readonly string[] | undefined + + if (isDefineConfigOptions(options)) { + ({ + pluginOptions = {}, + cwd, + configLoaderOptions, + pipelineArgs + } = { + pluginOptions: options.pluginOptions, + cwd: options.cwd, + configLoaderOptions: options.configLoaderOptions, + pipelineArgs: options.pipelineArgs + }) + shouldLoadUserConfig = options.loadUserConfig ?? true + } else { + pluginOptions = options + shouldLoadUserConfig = true + configLoaderOptions = void 0 + pipelineArgs = void 0 + } + + let userConfigOptions: Partial = {} + let userConfigFound = false + let userConfigSources: readonly string[] = [] + let userConfigFile: UserConfigFile | undefined + + if (shouldLoadUserConfig) { + try { + const userConfigResult = getConfigLoader(configLoaderOptions).load(cwd) + userConfigFound = userConfigResult.found + userConfigSources = userConfigResult.sources + if (userConfigResult.found) { + userConfigOptions = userConfigToPluginOptions(userConfigResult.config) + userConfigFile = userConfigResult.config + } + } + catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + throw new Error(`Failed to load user config: ${errorMessage}`) + } + } + + const mergedOptions = mergeConfig(userConfigOptions, pluginOptions) + const {plugins = [], logLevel} = mergedOptions + const logger = createLogger('defineConfig', logLevel) + + if (userConfigFound) { + logger.info('user config loaded', {sources: userConfigSources}) + } else { + logger.info('no user config found, using defaults/programmatic options', { + workspaceDir: mergedOptions.workspaceDir, + aindexDir: mergedOptions.aindex.dir, + logLevel: mergedOptions.logLevel + }) + } + + const outputPlugins = plugins.filter(isOutputPlugin) + const inputCapabilities = plugins.filter(isInputCapability) + validateOutputScopeOverridesForPlugins(outputPlugins, mergedOptions) + + return { + mergedOptions, + outputPlugins, + inputCapabilities, + ...userConfigFile != null && {userConfigFile}, + ...pipelineArgs != null && {pipelineArgs}, + userConfigFound, + userConfigSources + } +} + +/** + * Define configuration with support for user config files. + * + * Configuration priority (highest to lowest): + * 1. Programmatic options passed to defineConfig + * 2. Global config file (~/.aindex/.tnmsc.json) + * 3. Default values + * + * @param options - Plugin options or DefineConfigOptions + */ +export async function defineConfig(options: PluginOptions | DefineConfigOptions = {}): Promise { + const { + hasExplicitProgrammaticPlugins, + explicitProgrammaticPlugins + } = getProgrammaticPluginDeclaration(options) + const { + mergedOptions, + outputPlugins, + inputCapabilities, + userConfigFile, + pipelineArgs + } = await resolvePluginSetup(options) + const logger = createLogger('defineConfig', mergedOptions.logLevel) + + if (shouldUsePluginsFastPath(pipelineArgs)) { + const context = createMinimalOutputCollectedContext(mergedOptions) + return {context, outputPlugins, userConfigOptions: mergedOptions} + } + + const merged = await collectInputContext({ + userConfigOptions: mergedOptions, + ...inputCapabilities.length > 0 ? {capabilities: inputCapabilities} : {}, + includeBuiltinEffects: !(inputCapabilities.length > 0 || (hasExplicitProgrammaticPlugins && (explicitProgrammaticPlugins?.length ?? 0) === 0)), + ...pipelineArgs != null ? {pipelineArgs} : {}, + ...userConfigFile != null ? {userConfig: userConfigFile} : {} + }) + + if (merged.workspace == null) throw new Error('Workspace not initialized by any plugin') + + const inputContext: InputCollectedContext = { + workspace: merged.workspace, + ...merged.vscodeConfigFiles != null && {vscodeConfigFiles: merged.vscodeConfigFiles}, + ...merged.zedConfigFiles != null && {zedConfigFiles: merged.zedConfigFiles}, + ...merged.jetbrainsConfigFiles != null && {jetbrainsConfigFiles: merged.jetbrainsConfigFiles}, + ...merged.editorConfigFiles != null && {editorConfigFiles: merged.editorConfigFiles}, + ...merged.commands != null && {commands: merged.commands}, + ...merged.subAgents != null && {subAgents: merged.subAgents}, + ...merged.skills != null && {skills: merged.skills}, + ...merged.rules != null && {rules: merged.rules}, + ...merged.globalMemory != null && {globalMemory: merged.globalMemory}, + ...merged.aiAgentIgnoreConfigFiles != null && {aiAgentIgnoreConfigFiles: merged.aiAgentIgnoreConfigFiles}, + ...merged.aindexDir != null && {aindexDir: merged.aindexDir}, + ...merged.readmePrompts != null && {readmePrompts: merged.readmePrompts}, + ...merged.globalGitIgnore != null && {globalGitIgnore: merged.globalGitIgnore}, + ...merged.shadowGitExclude != null && {shadowGitExclude: merged.shadowGitExclude} + } + + const context = toOutputCollectedContext(inputContext) + + if (merged.aindexDir != null) { + checkVersionControl(merged.aindexDir, logger) + } + + return {context, outputPlugins, userConfigOptions: mergedOptions} +} diff --git a/sdk/src/core/cleanup.rs b/sdk/src/core/cleanup.rs new file mode 100644 index 00000000..367b79c9 --- /dev/null +++ b/sdk/src/core/cleanup.rs @@ -0,0 +1,2309 @@ +use std::collections::{BTreeSet, HashMap, HashSet}; +use std::env; +use std::fs; +use std::path::{Component, Path, PathBuf}; + +use globset::{Glob, GlobBuilder, GlobSet, GlobSetBuilder}; +use serde::{Deserialize, Serialize}; +use walkdir::WalkDir; + +use crate::core::{config, desk_paths}; + +const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS: [&str; 6] = [ + "**/node_modules/**", + "**/.git/**", + "**/.turbo/**", + "**/.pnpm-store/**", + "**/.yarn/**", + "**/.next/**", +]; + +const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES: [&str; 17] = [ + ".git", + "node_modules", + "dist", + "target", + ".next", + ".turbo", + "coverage", + ".nyc_output", + ".cache", + ".vite", + ".vite-temp", + ".pnpm-store", + ".yarn", + ".idea", + ".vscode", + ".volumes", + "volumes", +]; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ProtectionModeDto { + Direct, + Recursive, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ProtectionRuleMatcherDto { + Path, + Glob, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum CleanupTargetKindDto { + File, + Directory, + Glob, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum CleanupErrorKindDto { + File, + Directory, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupTargetDto { + pub path: String, + pub kind: CleanupTargetKindDto, + #[serde(default)] + pub exclude_basenames: Vec, + pub protection_mode: Option, + pub scope: Option, + pub label: Option, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupDeclarationsDto { + #[serde(default)] + pub delete: Vec, + #[serde(default)] + pub protect: Vec, + #[serde(default)] + pub exclude_scan_globs: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PluginCleanupSnapshotDto { + pub plugin_name: String, + #[serde(default)] + pub outputs: Vec, + #[serde(default)] + pub cleanup: CleanupDeclarationsDto, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProtectedRuleDto { + pub path: String, + pub protection_mode: ProtectionModeDto, + pub reason: String, + pub source: String, + pub matcher: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupSnapshot { + pub workspace_dir: String, + pub aindex_dir: Option, + #[serde(default)] + pub project_roots: Vec, + #[serde(default)] + pub protected_rules: Vec, + #[serde(default)] + pub plugin_snapshots: Vec, + /// Glob patterns from aindex.config.ts that should be excluded from + /// the empty-directory scanner (git-style ** patterns supported). + #[serde(default)] + pub empty_dir_exclude_globs: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProtectedPathViolationDto { + pub target_path: String, + pub protected_path: String, + pub protection_mode: ProtectionModeDto, + pub reason: String, + pub source: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupProtectionConflictDto { + pub output_path: String, + pub output_plugin: String, + pub protected_path: String, + pub protection_mode: ProtectionModeDto, + pub protected_by: String, + pub reason: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupPlan { + pub files_to_delete: Vec, + pub dirs_to_delete: Vec, + pub empty_dirs_to_delete: Vec, + pub violations: Vec, + pub conflicts: Vec, + pub excluded_scan_globs: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupErrorDto { + pub path: String, + pub kind: CleanupErrorKindDto, + pub error: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupExecutionResultDto { + pub deleted_files: usize, + pub deleted_dirs: usize, + pub errors: Vec, + pub violations: Vec, + pub conflicts: Vec, + pub files_to_delete: Vec, + pub dirs_to_delete: Vec, + pub empty_dirs_to_delete: Vec, + pub excluded_scan_globs: Vec, +} + +#[derive(Debug, Clone)] +struct CompiledProtectedRule { + path: String, + protection_mode: ProtectionModeDto, + reason: String, + source: String, + comparison_keys: Vec, + normalized_path: String, + specificity: usize, +} + +#[derive(Debug, Clone)] +struct ProtectedDeletionGuard { + compiled_rules: Vec, +} + +struct PartitionResult { + safe_paths: Vec, + violations: Vec, +} + +fn resolve_home_dir() -> PathBuf { + let runtime_environment = config::resolve_runtime_environment(); + runtime_environment + .effective_home_dir + .or(runtime_environment.native_home_dir) + .unwrap_or_else(|| PathBuf::from("/")) +} + +fn expand_home_path(raw_path: &str) -> PathBuf { + if raw_path == "~" || raw_path.starts_with("~/") || raw_path.starts_with("~\\") { + return config::resolve_tilde(raw_path); + } + PathBuf::from(raw_path) +} + +fn normalize_path(path: &Path) -> PathBuf { + let mut normalized = PathBuf::new(); + + for component in path.components() { + match component { + Component::Prefix(prefix) => normalized.push(prefix.as_os_str()), + Component::RootDir => normalized.push(Path::new(std::path::MAIN_SEPARATOR_STR)), + Component::CurDir => {} + Component::ParentDir => { + let popped = normalized.pop(); + if !popped && !path.is_absolute() { + normalized.push(".."); + } + } + Component::Normal(segment) => normalized.push(segment), + } + } + + if normalized.as_os_str().is_empty() { + if path.is_absolute() { + return PathBuf::from(std::path::MAIN_SEPARATOR_STR); + } + return PathBuf::from("."); + } + + normalized +} + +fn resolve_absolute_path(raw_path: &str) -> PathBuf { + let expanded = expand_home_path(raw_path); + let candidate = if expanded.is_absolute() { + expanded + } else { + env::current_dir() + .unwrap_or_else(|_| PathBuf::from(".")) + .join(expanded) + }; + + normalize_path(&candidate) +} + +fn path_to_string(path: &Path) -> String { + normalize_path(path).to_string_lossy().into_owned() +} + +fn path_to_glob_string(path: &Path) -> String { + path_to_string(path).replace('\\', "/") +} + +fn normalize_glob_pattern(pattern: &str) -> String { + path_to_glob_string(&resolve_absolute_path(pattern)) +} + +fn normalize_relative_glob_pattern(pattern: &str) -> String { + let normalized = pattern.replace('\\', "/"); + let normalized = normalized.trim_start_matches("./"); + normalized.trim_start_matches('/').to_string() +} + +fn normalize_workspace_relative_path(path: &Path, workspace_dir: &Path) -> Option { + let relative = path.strip_prefix(workspace_dir).ok()?; + let relative = path_to_glob_string(relative); + Some(relative.trim_start_matches('/').to_string()) +} + +fn normalize_for_comparison(raw_path: &str) -> String { + let normalized = path_to_string(&resolve_absolute_path(raw_path)); + if cfg!(windows) { + normalized.to_lowercase() + } else { + normalized + } +} + +fn build_comparison_keys(raw_path: &str) -> Vec { + let absolute = resolve_absolute_path(raw_path); + let mut keys = HashSet::from([normalize_for_comparison(&path_to_string(&absolute))]); + + if let Ok(real_path) = fs::canonicalize(&absolute) { + keys.insert(normalize_for_comparison(&path_to_string(&real_path))); + } + + let mut collected = keys.into_iter().collect::>(); + collected.sort(); + collected +} + +fn is_same_or_child_path(candidate: &str, parent: &str) -> bool { + if candidate == parent { + return true; + } + + let separator = std::path::MAIN_SEPARATOR; + let prefix = if parent.ends_with(separator) { + parent.to_string() + } else { + format!("{parent}{separator}") + }; + + candidate.starts_with(&prefix) +} + +fn create_protected_rule( + raw_path: &str, + protection_mode: ProtectionModeDto, + reason: impl Into, + source: impl Into, + matcher: Option, +) -> ProtectedRuleDto { + ProtectedRuleDto { + path: path_to_string(&resolve_absolute_path(raw_path)), + protection_mode, + reason: reason.into(), + source: source.into(), + matcher, + } +} + +fn compile_rule(rule: &ProtectedRuleDto) -> CompiledProtectedRule { + let normalized_path = normalize_for_comparison(&rule.path); + CompiledProtectedRule { + path: path_to_string(&resolve_absolute_path(&rule.path)), + protection_mode: rule.protection_mode, + reason: rule.reason.clone(), + source: rule.source.clone(), + comparison_keys: build_comparison_keys(&rule.path), + specificity: normalized_path + .trim_end_matches(std::path::MAIN_SEPARATOR) + .len(), + normalized_path, + } +} + +fn dedupe_and_compile_rules(rules: &[ProtectedRuleDto]) -> Vec { + let mut compiled_by_key = HashMap::new(); + + for rule in rules { + let compiled = compile_rule(rule); + compiled_by_key.insert( + format!( + "{}:{}", + match compiled.protection_mode { + ProtectionModeDto::Direct => "direct", + ProtectionModeDto::Recursive => "recursive", + }, + compiled.normalized_path + ), + compiled, + ); + } + + let mut compiled = compiled_by_key.into_values().collect::>(); + compiled.sort_by(|a, b| { + b.specificity + .cmp(&a.specificity) + .then_with(|| match (a.protection_mode, b.protection_mode) { + (ProtectionModeDto::Recursive, ProtectionModeDto::Direct) => { + std::cmp::Ordering::Less + } + (ProtectionModeDto::Direct, ProtectionModeDto::Recursive) => { + std::cmp::Ordering::Greater + } + _ => std::cmp::Ordering::Equal, + }) + .then_with(|| a.path.cmp(&b.path)) + }); + compiled +} + +fn glob_builder(pattern: &str) -> Result { + GlobBuilder::new(pattern) + .literal_separator(true) + .backslash_escape(false) + .case_insensitive(cfg!(windows)) + .build() + .map_err(|error| error.to_string()) +} + +fn build_globset(patterns: &[String]) -> Result, String> { + if patterns.is_empty() { + return Ok(None); + } + + let mut builder = GlobSetBuilder::new(); + for pattern in patterns { + builder.add(glob_builder(pattern)?); + } + builder.build().map(Some).map_err(|error| error.to_string()) +} + +fn has_glob_magic(value: &str) -> bool { + value.contains('*') + || value.contains('?') + || value.contains('[') + || value.contains(']') + || value.contains('{') + || value.contains('}') + || value.contains('!') +} + +fn detect_glob_scan_root(pattern: &str) -> PathBuf { + let normalized = pattern.replace('\\', "/"); + if !has_glob_magic(&normalized) { + return resolve_absolute_path(&normalized); + } + + let first_magic_index = normalized + .char_indices() + .find_map(|(index, character)| has_glob_magic(&character.to_string()).then_some(index)) + .unwrap_or(normalized.len()); + + let prefix = normalized[..first_magic_index].trim_end_matches('/'); + if prefix.is_empty() { + return env::current_dir().unwrap_or_else(|_| PathBuf::from(".")); + } + + let scan_root = prefix.rsplit_once('/').map_or(prefix, |(head, _)| { + if head.is_empty() { + if normalized.starts_with('/') { + "/" + } else { + prefix + } + } else { + head + } + }); + + resolve_absolute_path(scan_root) +} + +/// A group of glob patterns that share the same scan root and ignore globs. +/// All patterns in the group are evaluated in a single directory walk. +#[derive(Debug, Clone)] +struct GlobGroup { + scan_root: PathBuf, + pattern_indices: Vec, +} + +/// Metadata associated with each glob pattern for result fan-out. +#[derive(Debug, Clone)] +struct GlobTargetMetadata { + is_protected: bool, + target_index: usize, + exclude_basenames: Vec, +} + +type GlobMatchResults = Vec<(usize, Vec)>; +type BatchedGlobExecutionResult = (GlobMatchResults, GlobMatchResults); + +/// Batched glob planner that groups patterns by scan root and ignore set. +/// This reduces the number of directory walks from O(patterns) to O(unique scan roots). +#[derive(Debug)] +struct BatchedGlobPlanner { + ignore_matcher: Option, + groups: Vec, + normalized_patterns: Vec, + metadata: Vec, +} + +impl BatchedGlobPlanner { + fn new(ignore_globs: &[String]) -> Result { + Ok(Self { + ignore_matcher: build_globset(ignore_globs)?, + groups: Vec::new(), + normalized_patterns: Vec::new(), + metadata: Vec::new(), + }) + } + + /// Add a glob pattern to the planner with its associated metadata. + fn add_pattern( + &mut self, + pattern: &str, + is_protected: bool, + target_index: usize, + exclude_basenames: Vec, + ) { + let normalized = normalize_glob_pattern(pattern); + let pattern_index = self.normalized_patterns.len(); + self.normalized_patterns.push(normalized.clone()); + self.metadata.push(GlobTargetMetadata { + is_protected, + target_index, + exclude_basenames, + }); + + // Non-glob patterns (literal paths) don't need directory scanning + if !has_glob_magic(&normalized) { + return; + } + + let scan_root = detect_glob_scan_root(&normalized); + let scan_root_str = path_to_string(&scan_root); + + // Find or create a group for this scan root + if let Some(group) = self + .groups + .iter_mut() + .find(|g| path_to_string(&g.scan_root) == scan_root_str) + { + group.pattern_indices.push(pattern_index); + } else { + self.groups.push(GlobGroup { + scan_root, + pattern_indices: vec![pattern_index], + }); + } + } + + /// Execute the batched glob expansion and fan results back to targets. + /// Returns (protected_matches, delete_matches) where each is a vec of (target_index, matched_paths). + fn execute(&self) -> Result { + let mut protected_results: HashMap> = HashMap::new(); + let mut delete_results: HashMap> = HashMap::new(); + + // Process literal paths (non-glob patterns) directly + for (pattern_index, pattern) in self.normalized_patterns.iter().enumerate() { + if has_glob_magic(pattern) { + continue; + } + + let absolute_path = resolve_absolute_path(pattern); + if !absolute_path.exists() { + continue; + } + + let candidate = path_to_glob_string(&absolute_path); + if self + .ignore_matcher + .as_ref() + .is_some_and(|compiled| compiled.is_match(&candidate)) + { + continue; + } + + let metadata = &self.metadata[pattern_index]; + let normalized_entry = path_to_string(&absolute_path); + + // Check exclude_basenames for delete targets + if !metadata.is_protected + && !metadata.exclude_basenames.is_empty() + && let Some(basename) = Path::new(&normalized_entry).file_name() + { + let basename_str = basename.to_string_lossy(); + if metadata + .exclude_basenames + .iter() + .any(|excluded| excluded == basename_str.as_ref()) + { + continue; + } + } + + let target_map = if metadata.is_protected { + &mut protected_results + } else { + &mut delete_results + }; + target_map + .entry(metadata.target_index) + .or_default() + .push(normalized_entry); + } + + // Process each group's patterns with a single directory walk + for group in &self.groups { + if !group.scan_root.exists() { + continue; + } + + let group_patterns: Vec = group + .pattern_indices + .iter() + .map(|&idx| self.normalized_patterns[idx].clone()) + .collect(); + + let matcher = build_globset(&group_patterns)? + .ok_or_else(|| "failed to compile cleanup glob batch".to_string())?; + + let walker = WalkDir::new(&group.scan_root) + .follow_links(false) + .into_iter() + .filter_entry(|entry| { + let candidate = path_to_glob_string(entry.path()); + !self + .ignore_matcher + .as_ref() + .is_some_and(|compiled| compiled.is_match(&candidate)) + }); + + for entry in walker { + let Ok(entry) = entry else { + continue; + }; + + let candidate = path_to_glob_string(entry.path()); + let matched_indices = matcher.matches(&candidate); + if matched_indices.is_empty() { + continue; + } + + let normalized_entry = path_to_string(&normalize_path(entry.path())); + + for matched_index in matched_indices { + let pattern_index = group.pattern_indices[matched_index]; + let metadata = &self.metadata[pattern_index]; + + // Check exclude_basenames for delete targets + if !metadata.is_protected + && !metadata.exclude_basenames.is_empty() + && let Some(basename) = Path::new(&normalized_entry).file_name() + { + let basename_str = basename.to_string_lossy(); + if metadata + .exclude_basenames + .iter() + .any(|excluded| excluded == basename_str.as_ref()) + { + continue; + } + } + + let target_map = if metadata.is_protected { + &mut protected_results + } else { + &mut delete_results + }; + target_map + .entry(metadata.target_index) + .or_default() + .push(normalized_entry.clone()); + } + } + } + + // Convert HashMaps to sorted Vecs and deduplicate + let mut protected_vec: Vec<(usize, Vec)> = protected_results + .into_iter() + .map(|(idx, mut paths)| { + paths.sort(); + paths.dedup(); + (idx, paths) + }) + .collect(); + protected_vec.sort_by_key(|(idx, _)| *idx); + + let mut delete_vec: Vec<(usize, Vec)> = delete_results + .into_iter() + .map(|(idx, mut paths)| { + paths.sort(); + paths.dedup(); + (idx, paths) + }) + .collect(); + delete_vec.sort_by_key(|(idx, _)| *idx); + + Ok((protected_vec, delete_vec)) + } +} + +/// Legacy function kept for backward compatibility with expand_protected_rules. +/// Prefer using BatchedGlobPlanner for new code. +fn expand_globs(patterns: &[String], ignore_globs: &[String]) -> Result>, String> { + if patterns.is_empty() { + return Ok(Vec::new()); + } + + let mut planner = BatchedGlobPlanner::new(ignore_globs)?; + for (index, pattern) in patterns.iter().enumerate() { + planner.add_pattern(pattern, false, index, Vec::new()); + } + + let (_, delete_results) = planner.execute()?; + let mut matches_by_pattern = vec![Vec::new(); patterns.len()]; + for (target_index, paths) in delete_results { + matches_by_pattern[target_index] = paths; + } + + Ok(matches_by_pattern) +} + +fn expand_protected_rules(rules: &[ProtectedRuleDto]) -> Result, String> { + let mut expanded = Vec::new(); + let mut glob_rules = Vec::new(); + + for rule in rules { + if !matches!(rule.matcher, Some(ProtectionRuleMatcherDto::Glob)) { + expanded.push(create_protected_rule( + &rule.path, + rule.protection_mode, + rule.reason.clone(), + rule.source.clone(), + None, + )); + continue; + } + glob_rules.push(rule.clone()); + } + + let matched_paths_by_rule = expand_globs( + &glob_rules + .iter() + .map(|rule| rule.path.clone()) + .collect::>(), + &[], + )?; + for (rule, matched_paths) in glob_rules.iter().zip(matched_paths_by_rule) { + for matched_path in matched_paths { + expanded.push(create_protected_rule( + &matched_path, + rule.protection_mode, + rule.reason.clone(), + rule.source.clone(), + None, + )); + } + } + + Ok(expanded) +} + +fn root_path_for(path: &Path) -> PathBuf { + let mut root = PathBuf::new(); + for component in path.components() { + match component { + Component::Prefix(prefix) => root.push(prefix.as_os_str()), + Component::RootDir => { + root.push(Path::new(std::path::MAIN_SEPARATOR_STR)); + break; + } + _ => break, + } + } + if root.as_os_str().is_empty() { + return PathBuf::from(std::path::MAIN_SEPARATOR_STR); + } + root +} + +fn collect_built_in_dangerous_path_rules() -> Vec { + let home_dir = resolve_home_dir(); + let xdg_config_home = env::var("XDG_CONFIG_HOME") + .ok() + .filter(|value| !value.trim().is_empty()) + .map(|value| resolve_absolute_path(&value)) + .unwrap_or_else(|| home_dir.join(".config")); + let xdg_data_home = env::var("XDG_DATA_HOME") + .ok() + .filter(|value| !value.trim().is_empty()) + .map(|value| resolve_absolute_path(&value)) + .unwrap_or_else(|| home_dir.join(".local/share")); + let xdg_state_home = env::var("XDG_STATE_HOME") + .ok() + .filter(|value| !value.trim().is_empty()) + .map(|value| resolve_absolute_path(&value)) + .unwrap_or_else(|| home_dir.join(".local/state")); + let xdg_cache_home = env::var("XDG_CACHE_HOME") + .ok() + .filter(|value| !value.trim().is_empty()) + .map(|value| resolve_absolute_path(&value)) + .unwrap_or_else(|| home_dir.join(".cache")); + + vec![ + create_protected_rule( + &path_to_string(&root_path_for(&home_dir)), + ProtectionModeDto::Direct, + "built-in dangerous root path", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&home_dir), + ProtectionModeDto::Direct, + "built-in dangerous home directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&xdg_config_home), + ProtectionModeDto::Direct, + "built-in dangerous config directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&xdg_data_home), + ProtectionModeDto::Direct, + "built-in dangerous data directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&xdg_state_home), + ProtectionModeDto::Direct, + "built-in dangerous state directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&xdg_cache_home), + ProtectionModeDto::Direct, + "built-in dangerous cache directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&home_dir.join(".aindex")), + ProtectionModeDto::Direct, + "built-in global aindex directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&home_dir.join(".aindex/.tnmsc.json")), + ProtectionModeDto::Direct, + "built-in global config file", + "built-in-config", + None, + ), + ] +} + +fn collect_workspace_reserved_rules( + workspace_dir: &str, + project_roots: &[String], + include_reserved_workspace_content_roots: bool, +) -> Vec { + let workspace_dir = path_to_string(&resolve_absolute_path(workspace_dir)); + let mut rules = vec![ + create_protected_rule( + &workspace_dir, + ProtectionModeDto::Direct, + "workspace root", + "workspace-reserved", + None, + ), + create_protected_rule( + &path_to_string(&resolve_absolute_path(&format!("{workspace_dir}/aindex"))), + ProtectionModeDto::Direct, + "reserved workspace aindex root", + "workspace-reserved", + None, + ), + create_protected_rule( + &path_to_string(&resolve_absolute_path(&format!( + "{workspace_dir}/knowladge" + ))), + ProtectionModeDto::Direct, + "reserved workspace knowladge root", + "workspace-reserved", + None, + ), + ]; + + for project_root in project_roots { + rules.push(create_protected_rule( + project_root, + ProtectionModeDto::Direct, + "workspace project root", + "workspace-project-root", + None, + )); + } + + if include_reserved_workspace_content_roots { + rules.push(create_protected_rule( + &format!("{workspace_dir}/aindex/dist/**/*.mdx"), + ProtectionModeDto::Direct, + "reserved workspace aindex dist mdx files", + "workspace-reserved", + Some(ProtectionRuleMatcherDto::Glob), + )); + for series_name in ["app", "ext", "arch", "softwares"] { + rules.push(create_protected_rule( + &format!("{workspace_dir}/aindex/{series_name}/**/*.mdx"), + ProtectionModeDto::Direct, + format!("reserved workspace aindex {series_name} mdx files"), + "workspace-reserved", + Some(ProtectionRuleMatcherDto::Glob), + )); + } + } + + rules +} + +fn create_guard( + snapshot: &CleanupSnapshot, + rules: &[ProtectedRuleDto], +) -> Result { + let mut all_rules = collect_built_in_dangerous_path_rules(); + all_rules.extend(collect_workspace_reserved_rules( + &snapshot.workspace_dir, + &snapshot.project_roots, + true, + )); + + if let Some(aindex_dir) = snapshot.aindex_dir.as_ref() { + all_rules.push(create_protected_rule( + aindex_dir, + ProtectionModeDto::Direct, + "resolved aindex root", + "aindex-root", + None, + )); + } + + all_rules.extend_from_slice(rules); + let compiled_rules = dedupe_and_compile_rules(&expand_protected_rules(&all_rules)?); + + Ok(ProtectedDeletionGuard { compiled_rules }) +} + +fn is_rule_match(target_key: &str, rule_key: &str, protection_mode: ProtectionModeDto) -> bool { + match protection_mode { + ProtectionModeDto::Direct => is_same_or_child_path(rule_key, target_key), + ProtectionModeDto::Recursive => { + is_same_or_child_path(target_key, rule_key) + || is_same_or_child_path(rule_key, target_key) + } + } +} + +fn select_more_specific_rule( + candidate: &CompiledProtectedRule, + current: Option<&CompiledProtectedRule>, +) -> CompiledProtectedRule { + let Some(current) = current else { + return candidate.clone(); + }; + + if candidate.specificity != current.specificity { + return if candidate.specificity > current.specificity { + candidate.clone() + } else { + current.clone() + }; + } + + if candidate.protection_mode != current.protection_mode { + return if candidate.protection_mode == ProtectionModeDto::Recursive { + candidate.clone() + } else { + current.clone() + }; + } + + if candidate.path < current.path { + candidate.clone() + } else { + current.clone() + } +} + +fn get_protected_path_violation( + target_path: &str, + guard: &ProtectedDeletionGuard, +) -> Option { + let absolute_target_path = path_to_string(&resolve_absolute_path(target_path)); + let target_keys = build_comparison_keys(&absolute_target_path); + let mut matched_rule: Option = None; + + for rule in &guard.compiled_rules { + let mut did_match = false; + for target_key in &target_keys { + for rule_key in &rule.comparison_keys { + if !is_rule_match(target_key, rule_key, rule.protection_mode) { + continue; + } + + matched_rule = Some(select_more_specific_rule(rule, matched_rule.as_ref())); + did_match = true; + break; + } + if did_match { + break; + } + } + } + + matched_rule.map(|rule| ProtectedPathViolationDto { + target_path: absolute_target_path, + protected_path: rule.path, + protection_mode: rule.protection_mode, + reason: rule.reason, + source: rule.source, + }) +} + +fn partition_deletion_targets(paths: &[String], guard: &ProtectedDeletionGuard) -> PartitionResult { + let mut safe_paths = Vec::new(); + let mut violations = Vec::new(); + + for target_path in paths { + if let Some(violation) = get_protected_path_violation(target_path, guard) { + violations.push(violation); + } else { + safe_paths.push(path_to_string(&resolve_absolute_path(target_path))); + } + } + + safe_paths.sort(); + violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); + + PartitionResult { + safe_paths, + violations, + } +} + +fn compact_deletion_targets(files: &[String], dirs: &[String]) -> (Vec, Vec) { + let files_by_key = files + .iter() + .map(|file_path| { + let resolved = path_to_string(&resolve_absolute_path(file_path)); + (resolved.clone(), resolved) + }) + .collect::>(); + let dirs_by_key = dirs + .iter() + .map(|dir_path| { + let resolved = path_to_string(&resolve_absolute_path(dir_path)); + (resolved.clone(), resolved) + }) + .collect::>(); + + let mut sorted_dir_entries = dirs_by_key.into_iter().collect::>(); + sorted_dir_entries + .sort_by(|(left_key, _), (right_key, _)| left_key.len().cmp(&right_key.len())); + + let mut compacted_dirs: HashMap = HashMap::new(); + for (dir_key, dir_path) in sorted_dir_entries { + let covered_by_parent = compacted_dirs + .keys() + .any(|existing_parent_key| is_same_or_child_path(&dir_key, existing_parent_key)); + if !covered_by_parent { + compacted_dirs.insert(dir_key, dir_path); + } + } + + let mut compacted_files = Vec::new(); + for (file_key, file_path) in files_by_key { + let covered_by_dir = compacted_dirs + .keys() + .any(|dir_key| is_same_or_child_path(&file_key, dir_key)); + if !covered_by_dir { + compacted_files.push(file_path); + } + } + + compacted_files.sort(); + let mut compacted_dir_paths = compacted_dirs.into_values().collect::>(); + compacted_dir_paths.sort(); + + (compacted_files, compacted_dir_paths) +} + +fn should_skip_empty_directory_tree(workspace_dir: &str, current_dir: &str) -> bool { + if current_dir == workspace_dir { + return false; + } + + Path::new(current_dir) + .file_name() + .and_then(|value| value.to_str()) + .is_some_and(|basename| EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.contains(&basename)) +} + +/// Check if a directory path should be excluded from empty-directory scan +/// because it matches a user-supplied glob from aindex.config.ts. +fn matches_empty_dir_exclude_globs( + dir_path: &Path, + workspace_dir: &Path, + absolute_exclude_set: &Option, + relative_exclude_set: &Option, +) -> bool { + let absolute_match = absolute_exclude_set + .as_ref() + .is_some_and(|globs| globs.is_match(path_to_glob_string(dir_path))); + if absolute_match { + return true; + } + + relative_exclude_set.as_ref().is_some_and(|globs| { + normalize_workspace_relative_path(dir_path, workspace_dir) + .is_some_and(|relative_path| globs.is_match(relative_path)) + }) +} + +fn collect_empty_workspace_directories( + current_dir: &Path, + workspace_dir: &Path, + files_to_delete: &HashSet, + dirs_to_delete: &HashSet, + empty_dirs_to_delete: &mut BTreeSet, + empty_dir_absolute_exclude: &Option, + empty_dir_relative_exclude: &Option, +) -> bool { + let current_dir = normalize_path(current_dir); + let current_dir_string = path_to_string(¤t_dir); + let workspace_dir_string = path_to_string(workspace_dir); + + if dirs_to_delete.contains(¤t_dir_string) { + return true; + } + + if should_skip_empty_directory_tree(&workspace_dir_string, ¤t_dir_string) { + return false; + } + + if matches_empty_dir_exclude_globs( + ¤t_dir, + workspace_dir, + empty_dir_absolute_exclude, + empty_dir_relative_exclude, + ) { + return false; + } + + let Ok(entries) = fs::read_dir(¤t_dir) else { + return false; + }; + + let mut has_retained_entries = false; + + for entry in entries { + let Ok(entry) = entry else { + has_retained_entries = true; + continue; + }; + + let entry_path = normalize_path(&entry.path()); + let entry_string = path_to_string(&entry_path); + + if dirs_to_delete.contains(&entry_string) { + continue; + } + + let Ok(file_type) = entry.file_type() else { + has_retained_entries = true; + continue; + }; + + if file_type.is_dir() { + if should_skip_empty_directory_tree(&workspace_dir_string, &entry_string) { + has_retained_entries = true; + continue; + } + + if matches_empty_dir_exclude_globs( + &entry_path, + workspace_dir, + empty_dir_absolute_exclude, + empty_dir_relative_exclude, + ) { + has_retained_entries = true; + continue; + } + + if collect_empty_workspace_directories( + &entry_path, + workspace_dir, + files_to_delete, + dirs_to_delete, + empty_dirs_to_delete, + empty_dir_absolute_exclude, + empty_dir_relative_exclude, + ) { + empty_dirs_to_delete.insert(entry_string); + continue; + } + + has_retained_entries = true; + continue; + } + + if files_to_delete.contains(&entry_string) { + continue; + } + + has_retained_entries = true; + } + + !has_retained_entries +} + +fn plan_workspace_empty_directory_cleanup( + workspace_dir: &str, + files_to_delete: &[String], + dirs_to_delete: &[String], + guard: &ProtectedDeletionGuard, + empty_dir_absolute_exclude: &Option, + empty_dir_relative_exclude: &Option, +) -> (Vec, Vec) { + let workspace_dir = resolve_absolute_path(workspace_dir); + let files_to_delete = files_to_delete + .iter() + .map(|path| path_to_string(&resolve_absolute_path(path))) + .collect::>(); + let dirs_to_delete = dirs_to_delete + .iter() + .map(|path| path_to_string(&resolve_absolute_path(path))) + .collect::>(); + let mut discovered_empty_dirs = BTreeSet::new(); + + collect_empty_workspace_directories( + &workspace_dir, + &workspace_dir, + &files_to_delete, + &dirs_to_delete, + &mut discovered_empty_dirs, + empty_dir_absolute_exclude, + empty_dir_relative_exclude, + ); + + let mut safe_empty_dirs = Vec::new(); + let mut violations = Vec::new(); + + for empty_dir in discovered_empty_dirs { + if let Some(violation) = get_protected_path_violation(&empty_dir, guard) { + violations.push(violation); + } else { + safe_empty_dirs.push(empty_dir); + } + } + + safe_empty_dirs.sort(); + violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); + + (safe_empty_dirs, violations) +} + +fn detect_cleanup_protection_conflicts( + output_path_owners: &HashMap>, + guard: &ProtectedDeletionGuard, +) -> Vec { + let mut conflicts = Vec::new(); + + for (output_path, output_plugins) in output_path_owners { + let output_keys = build_comparison_keys(output_path) + .into_iter() + .collect::>(); + + for rule in &guard.compiled_rules { + let is_exact_match = rule + .comparison_keys + .iter() + .any(|rule_key| output_keys.contains(rule_key)); + if !is_exact_match { + continue; + } + + for output_plugin in output_plugins { + conflicts.push(CleanupProtectionConflictDto { + output_path: output_path.clone(), + output_plugin: output_plugin.clone(), + protected_path: rule.path.clone(), + protection_mode: rule.protection_mode, + protected_by: rule.source.clone(), + reason: rule.reason.clone(), + }); + } + } + } + + conflicts.sort_by(|a, b| { + a.output_path + .cmp(&b.output_path) + .then_with(|| a.protected_path.cmp(&b.protected_path)) + }); + conflicts +} + +#[derive(Debug, Clone)] +struct ProtectedGlobCleanupTarget { + path: String, + protection_mode: ProtectionModeDto, + reason: String, + source: String, +} + +#[derive(Debug, Clone)] +struct DeleteGlobCleanupTarget { + target: CleanupTargetDto, +} + +fn default_protection_mode_for_target(target: &CleanupTargetDto) -> ProtectionModeDto { + target.protection_mode.unwrap_or(match target.kind { + CleanupTargetKindDto::File => ProtectionModeDto::Direct, + CleanupTargetKindDto::Directory | CleanupTargetKindDto::Glob => { + ProtectionModeDto::Recursive + } + }) +} + +pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { + let mut delete_files = HashSet::new(); + let mut delete_dirs = HashSet::new(); + let mut protected_rules = snapshot.protected_rules.clone(); + let mut exclude_scan_globs = BTreeSet::from_iter( + DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS + .iter() + .map(|value| (*value).to_string()), + ); + let mut output_path_owners = HashMap::>::new(); + let mut protected_glob_targets = Vec::::new(); + let mut delete_glob_targets = Vec::::new(); + + for plugin_snapshot in &snapshot.plugin_snapshots { + for output in &plugin_snapshot.outputs { + let resolved_output_path = path_to_string(&resolve_absolute_path(output)); + delete_files.insert(resolved_output_path.clone()); + output_path_owners + .entry(resolved_output_path) + .or_default() + .push(plugin_snapshot.plugin_name.clone()); + } + + for ignore_glob in &plugin_snapshot.cleanup.exclude_scan_globs { + exclude_scan_globs.insert(normalize_glob_pattern(ignore_glob)); + } + } + + let ignore_globs = exclude_scan_globs.iter().cloned().collect::>(); + + for plugin_snapshot in &snapshot.plugin_snapshots { + for target in &plugin_snapshot.cleanup.protect { + if target.kind == CleanupTargetKindDto::Glob { + protected_glob_targets.push(ProtectedGlobCleanupTarget { + path: target.path.clone(), + protection_mode: default_protection_mode_for_target(target), + reason: target + .label + .as_ref() + .map(|label| format!("plugin cleanup protect declaration ({label})")) + .unwrap_or_else(|| "plugin cleanup protect declaration".to_string()), + source: format!("plugin-cleanup-protect:{}", plugin_snapshot.plugin_name), + }); + continue; + } + + let reason = target + .label + .as_ref() + .map(|label| format!("plugin cleanup protect declaration ({label})")) + .unwrap_or_else(|| "plugin cleanup protect declaration".to_string()); + protected_rules.push(create_protected_rule( + &target.path, + default_protection_mode_for_target(target), + reason, + format!("plugin-cleanup-protect:{}", plugin_snapshot.plugin_name), + None, + )); + } + + for target in &plugin_snapshot.cleanup.delete { + if target.kind == CleanupTargetKindDto::Glob { + delete_glob_targets.push(DeleteGlobCleanupTarget { + target: target.clone(), + }); + continue; + } + + match target.kind { + CleanupTargetKindDto::Directory => { + delete_dirs.insert(path_to_string(&resolve_absolute_path(&target.path))); + } + CleanupTargetKindDto::File => { + delete_files.insert(path_to_string(&resolve_absolute_path(&target.path))); + } + CleanupTargetKindDto::Glob => {} + } + } + } + + // Batch all glob patterns (both protected and delete) into a single planner + // to minimize directory walks. This is the key performance optimization. + let mut planner = BatchedGlobPlanner::new(&ignore_globs)?; + + // Add protected glob targets + for (index, target) in protected_glob_targets.iter().enumerate() { + planner.add_pattern( + &target.path, + true, // is_protected + index, + Vec::new(), // protected globs don't use exclude_basenames + ); + } + + // Add delete glob targets + for (index, target) in delete_glob_targets.iter().enumerate() { + planner.add_pattern( + &target.target.path, + false, // is_delete + index, + target.target.exclude_basenames.clone(), + ); + } + + // Execute the batched glob expansion + let (protected_results, delete_results) = planner.execute()?; + + // Fan protected glob results back to their targets + for (target_index, matched_paths) in protected_results { + let target = &protected_glob_targets[target_index]; + for matched_path in matched_paths { + protected_rules.push(create_protected_rule( + &matched_path, + target.protection_mode, + target.reason.clone(), + target.source.clone(), + None, + )); + } + } + + // Fan delete glob results back to their targets + for (_target_index, matched_paths) in delete_results { + for matched_path in matched_paths { + let Ok(metadata) = fs::symlink_metadata(&matched_path) else { + continue; + }; + if metadata.is_dir() { + delete_dirs.insert(path_to_string(&resolve_absolute_path(&matched_path))); + } else { + delete_files.insert(path_to_string(&resolve_absolute_path(&matched_path))); + } + } + } + + let guard = create_guard(&snapshot, &protected_rules)?; + let conflicts = detect_cleanup_protection_conflicts(&output_path_owners, &guard); + if !conflicts.is_empty() { + return Ok(CleanupPlan { + files_to_delete: Vec::new(), + dirs_to_delete: Vec::new(), + empty_dirs_to_delete: Vec::new(), + violations: Vec::new(), + conflicts, + excluded_scan_globs: ignore_globs, + }); + } + + let file_partition = + partition_deletion_targets(&delete_files.into_iter().collect::>(), &guard); + let dir_partition = + partition_deletion_targets(&delete_dirs.into_iter().collect::>(), &guard); + let (files_to_delete, dirs_to_delete) = + compact_deletion_targets(&file_partition.safe_paths, &dir_partition.safe_paths); + let empty_dir_absolute_exclude_set = build_globset( + &snapshot + .empty_dir_exclude_globs + .iter() + .map(|pattern| { + if expand_home_path(pattern).is_absolute() { + normalize_glob_pattern(pattern) + } else { + path_to_glob_string(&resolve_absolute_path(&format!( + "{}/{}", + snapshot.workspace_dir, pattern + ))) + } + }) + .collect::>(), + )?; + let empty_dir_relative_exclude_set = build_globset( + &snapshot + .empty_dir_exclude_globs + .iter() + .filter(|pattern| !expand_home_path(pattern).is_absolute()) + .map(|pattern| normalize_relative_glob_pattern(pattern)) + .collect::>(), + )?; + let (empty_dirs_to_delete, empty_dir_violations) = plan_workspace_empty_directory_cleanup( + &snapshot.workspace_dir, + &files_to_delete, + &dirs_to_delete, + &guard, + &empty_dir_absolute_exclude_set, + &empty_dir_relative_exclude_set, + ); + + let mut violations = file_partition.violations; + violations.extend(dir_partition.violations); + violations.extend(empty_dir_violations); + violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); + + Ok(CleanupPlan { + files_to_delete, + dirs_to_delete, + empty_dirs_to_delete, + violations, + conflicts: Vec::new(), + excluded_scan_globs: ignore_globs, + }) +} + +pub fn perform_cleanup(snapshot: CleanupSnapshot) -> Result { + let plan = plan_cleanup(snapshot)?; + if !plan.conflicts.is_empty() || !plan.violations.is_empty() { + return Ok(CleanupExecutionResultDto { + deleted_files: 0, + deleted_dirs: 0, + errors: Vec::new(), + violations: plan.violations, + conflicts: plan.conflicts, + files_to_delete: plan.files_to_delete, + dirs_to_delete: plan.dirs_to_delete, + empty_dirs_to_delete: plan.empty_dirs_to_delete, + excluded_scan_globs: plan.excluded_scan_globs, + }); + } + + let delete_result = desk_paths::delete_targets(&plan.files_to_delete, &plan.dirs_to_delete); + let empty_dir_result = desk_paths::delete_empty_directories(&plan.empty_dirs_to_delete); + let mut errors = delete_result + .file_errors + .into_iter() + .map(|error| CleanupErrorDto { + path: error.path, + kind: CleanupErrorKindDto::File, + error: error.error, + }) + .collect::>(); + errors.extend( + delete_result + .dir_errors + .into_iter() + .map(|error| CleanupErrorDto { + path: error.path, + kind: CleanupErrorKindDto::Directory, + error: error.error, + }), + ); + errors.extend( + empty_dir_result + .errors + .into_iter() + .map(|error| CleanupErrorDto { + path: error.path, + kind: CleanupErrorKindDto::Directory, + error: error.error, + }), + ); + + Ok(CleanupExecutionResultDto { + deleted_files: delete_result.deleted_files.len(), + deleted_dirs: delete_result.deleted_dirs.len() + empty_dir_result.deleted_paths.len(), + errors, + violations: Vec::new(), + conflicts: Vec::new(), + files_to_delete: plan.files_to_delete, + dirs_to_delete: plan.dirs_to_delete, + empty_dirs_to_delete: plan.empty_dirs_to_delete, + excluded_scan_globs: plan.excluded_scan_globs, + }) +} + +#[cfg(feature = "napi")] +mod napi_binding { + use napi_derive::napi; + + use super::{CleanupExecutionResultDto, CleanupPlan, CleanupSnapshot}; + + fn parse_snapshot(snapshot_json: String) -> napi::Result { + serde_json::from_str(&snapshot_json) + .map_err(|error| napi::Error::from_reason(error.to_string())) + } + + fn serialize_result(result: &T) -> napi::Result { + serde_json::to_string(result).map_err(|error| napi::Error::from_reason(error.to_string())) + } + + #[napi] + pub fn plan_cleanup(snapshot_json: String) -> napi::Result { + let snapshot = parse_snapshot(snapshot_json)?; + let result: CleanupPlan = + super::plan_cleanup(snapshot).map_err(napi::Error::from_reason)?; + serialize_result(&result) + } + + #[napi] + pub fn perform_cleanup(snapshot_json: String) -> napi::Result { + let snapshot = parse_snapshot(snapshot_json)?; + let result: CleanupExecutionResultDto = + super::perform_cleanup(snapshot).map_err(napi::Error::from_reason)?; + serialize_result(&result) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::tempdir; + + fn empty_snapshot(workspace_dir: &Path) -> CleanupSnapshot { + CleanupSnapshot { + workspace_dir: path_to_string(workspace_dir), + aindex_dir: Some(path_to_string(&workspace_dir.join("aindex"))), + project_roots: vec![path_to_string(&workspace_dir.join("project-a"))], + protected_rules: Vec::new(), + plugin_snapshots: Vec::new(), + empty_dir_exclude_globs: Vec::new(), + } + } + + fn single_plugin_snapshot( + workspace_dir: &Path, + outputs: Vec, + cleanup: CleanupDeclarationsDto, + ) -> CleanupSnapshot { + CleanupSnapshot { + plugin_snapshots: vec![PluginCleanupSnapshotDto { + plugin_name: "MockOutputPlugin".to_string(), + outputs, + cleanup, + }], + ..empty_snapshot(workspace_dir) + } + } + + #[test] + fn detects_exact_output_protection_conflicts() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let output_path = workspace_dir.join("project-a/AGENTS.md"); + fs::create_dir_all(output_path.parent().unwrap()).unwrap(); + fs::write(&output_path, "# output").unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![path_to_string(&output_path)], + CleanupDeclarationsDto { + protect: vec![CleanupTargetDto { + path: path_to_string(&output_path), + kind: CleanupTargetKindDto::File, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert_eq!(plan.conflicts.len(), 1); + assert!(plan.files_to_delete.is_empty()); + assert!(plan.dirs_to_delete.is_empty()); + } + + #[test] + fn expands_delete_globs_and_respects_excluded_basenames() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let skills_dir = workspace_dir.join(".codex/skills"); + let system_dir = skills_dir.join(".system"); + let stale_dir = skills_dir.join("legacy"); + fs::create_dir_all(&system_dir).unwrap(); + fs::create_dir_all(&stale_dir).unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&skills_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: vec![".system".to_string()], + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(plan.dirs_to_delete.contains(&path_to_string(&stale_dir))); + assert!(!plan.dirs_to_delete.contains(&path_to_string(&system_dir))); + } + + #[test] + fn preserves_direct_vs_recursive_guard_behavior() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let direct_dir = workspace_dir.join("project-a"); + let recursive_dir = workspace_dir.join("aindex/dist"); + let direct_file = direct_dir.join("AGENTS.md"); + let recursive_file = recursive_dir.join("commands/demo.mdx"); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![ + path_to_string(&direct_file), + path_to_string(&recursive_file), + ], + CleanupDeclarationsDto { + protect: vec![ + CleanupTargetDto { + path: path_to_string(&direct_dir), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: Some(ProtectionModeDto::Direct), + scope: None, + label: None, + }, + CleanupTargetDto { + path: path_to_string(&recursive_dir), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: Some(ProtectionModeDto::Recursive), + scope: None, + label: None, + }, + ], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(plan.files_to_delete.contains(&path_to_string(&direct_file))); + assert!(plan + .violations + .iter() + .any(|violation| violation.target_path == path_to_string(&recursive_file))); + } + + #[test] + fn blocks_reserved_workspace_mdx_descendants() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let protected_file = workspace_dir.join("aindex/dist/commands/demo.mdx"); + fs::create_dir_all(protected_file.parent().unwrap()).unwrap(); + fs::write(&protected_file, "# demo").unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&workspace_dir.join("aindex/dist")), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(plan.dirs_to_delete.is_empty()); + assert_eq!(plan.violations.len(), 1); + assert_eq!( + plan.violations[0].protected_path, + path_to_string(&protected_file) + ); + } + + #[cfg(unix)] + #[test] + fn matches_symlink_realpaths_against_protected_paths() { + use std::os::unix::fs::symlink; + + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let symlink_path = temp_dir.path().join("workspace-link"); + fs::create_dir_all(&workspace_dir).unwrap(); + symlink(&workspace_dir, &symlink_path).unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&symlink_path), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(plan.dirs_to_delete.is_empty()); + assert!(plan + .violations + .iter() + .any(|violation| violation.target_path == path_to_string(&symlink_path))); + } + + #[test] + fn compacts_nested_directory_targets() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let base_dir = workspace_dir.join(".claude"); + let rules_dir = base_dir.join("rules"); + let rule_file = rules_dir.join("demo.md"); + fs::create_dir_all(&rules_dir).unwrap(); + fs::write(&rule_file, "# demo").unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![path_to_string(&rule_file)], + CleanupDeclarationsDto { + delete: vec![ + CleanupTargetDto { + path: path_to_string(&base_dir), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }, + CleanupTargetDto { + path: path_to_string(&rules_dir), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }, + CleanupTargetDto { + path: path_to_string(&rule_file), + kind: CleanupTargetKindDto::File, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }, + ], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert_eq!(plan.dirs_to_delete, vec![path_to_string(&base_dir)]); + assert!(plan.files_to_delete.is_empty()); + } + + #[test] + fn plans_workspace_empty_directories_while_skipping_excluded_trees() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let source_leaf_dir = workspace_dir.join("source/empty/leaf"); + let source_keep_file = workspace_dir.join("source/keep.md"); + let dist_empty_dir = workspace_dir.join("dist/ghost"); + let node_modules_empty_dir = workspace_dir.join("node_modules/pkg/ghost"); + let git_empty_dir = workspace_dir.join(".git/objects/info"); + + fs::create_dir_all(&source_leaf_dir).unwrap(); + fs::create_dir_all(source_keep_file.parent().unwrap()).unwrap(); + fs::create_dir_all(&dist_empty_dir).unwrap(); + fs::create_dir_all(&node_modules_empty_dir).unwrap(); + fs::create_dir_all(&git_empty_dir).unwrap(); + fs::write(&source_keep_file, "# keep").unwrap(); + + let snapshot = + single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default()); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(plan.files_to_delete.is_empty()); + assert!(plan.dirs_to_delete.is_empty()); + assert_eq!( + plan.empty_dirs_to_delete, + vec![ + path_to_string(&workspace_dir.join("source/empty")), + path_to_string(&source_leaf_dir), + ] + ); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&dist_empty_dir))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&node_modules_empty_dir))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&git_empty_dir))); + } + + #[test] + fn performs_cleanup_and_prunes_workspace_empty_directories() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let output_file = workspace_dir.join("generated/AGENTS.md"); + let empty_leaf_dir = workspace_dir.join("scratch/empty/leaf"); + let retained_scratch_file = workspace_dir.join("scratch/keep.md"); + + fs::create_dir_all(output_file.parent().unwrap()).unwrap(); + fs::create_dir_all(&empty_leaf_dir).unwrap(); + fs::create_dir_all(retained_scratch_file.parent().unwrap()).unwrap(); + fs::write(&output_file, "# generated").unwrap(); + fs::write(&retained_scratch_file, "# keep").unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![path_to_string(&output_file)], + CleanupDeclarationsDto::default(), + ); + + let result = perform_cleanup(snapshot).unwrap(); + assert_eq!(result.deleted_files, 1); + assert_eq!(result.deleted_dirs, 3); + assert!(result.errors.is_empty()); + assert!(!output_file.exists()); + assert!(!workspace_dir.join("generated").exists()); + assert!(!empty_leaf_dir.exists()); + assert!(!workspace_dir.join("scratch/empty").exists()); + assert!(workspace_dir.join("scratch").exists()); + } + + #[test] + fn preserves_empty_directories_excluded_by_workspace_relative_globs() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let excluded_leaf_dir = workspace_dir.join("volumes/cache/leaf"); + let regular_leaf_dir = workspace_dir.join("scratch/empty/leaf"); + + fs::create_dir_all(&excluded_leaf_dir).unwrap(); + fs::create_dir_all(®ular_leaf_dir).unwrap(); + + let mut snapshot = + single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default()); + snapshot.empty_dir_exclude_globs = vec!["volumes/**".to_string()]; + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir.join("volumes/cache")))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&excluded_leaf_dir))); + assert!(plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir.join("scratch/empty")))); + assert!(plan + .empty_dirs_to_delete + .contains(&path_to_string(®ular_leaf_dir))); + } + + #[test] + fn skips_reserved_volume_trees_during_empty_directory_scan() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let volumes_leaf_dir = workspace_dir.join("volumes/cache/leaf"); + let hidden_volumes_leaf_dir = workspace_dir.join(".volumes/cache/leaf"); + let regular_leaf_dir = workspace_dir.join("scratch/empty/leaf"); + + fs::create_dir_all(&volumes_leaf_dir).unwrap(); + fs::create_dir_all(&hidden_volumes_leaf_dir).unwrap(); + fs::create_dir_all(®ular_leaf_dir).unwrap(); + + let snapshot = + single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default()); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir.join("volumes/cache")))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&volumes_leaf_dir))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir.join(".volumes/cache")))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&hidden_volumes_leaf_dir))); + assert!(plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir.join("scratch/empty")))); + assert!(plan + .empty_dirs_to_delete + .contains(&path_to_string(®ular_leaf_dir))); + } + + #[test] + fn batched_glob_planner_handles_multiple_globs_sharing_root() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let cache_dir = workspace_dir.join("cache"); + let temp_dir_path = workspace_dir.join("temp"); + let logs_dir = workspace_dir.join("logs"); + + // Create test directories + fs::create_dir_all(cache_dir.join("sub1")).unwrap(); + fs::create_dir_all(cache_dir.join("sub2")).unwrap(); + fs::create_dir_all(temp_dir_path.join("tmp1")).unwrap(); + fs::create_dir_all(logs_dir.join("2024")).unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![ + CleanupTargetDto { + path: path_to_string(&cache_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: Some("cache-cleanup".to_string()), + }, + CleanupTargetDto { + path: path_to_string(&temp_dir_path.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: Some("temp-cleanup".to_string()), + }, + ], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + // Should match subdirectories under cache/ and temp/ but not logs/ + assert_eq!(plan.dirs_to_delete.len(), 3); + assert!(plan + .dirs_to_delete + .contains(&path_to_string(&cache_dir.join("sub1")))); + assert!(plan + .dirs_to_delete + .contains(&path_to_string(&cache_dir.join("sub2")))); + assert!(plan + .dirs_to_delete + .contains(&path_to_string(&temp_dir_path.join("tmp1")))); + assert!(!plan + .dirs_to_delete + .contains(&path_to_string(&logs_dir.join("2024")))); + } + + #[test] + fn batched_glob_planner_handles_mixed_protect_and_delete_globs() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let data_dir = workspace_dir.join("data"); + let keep_dir = data_dir.join("keep"); + let delete_dir = data_dir.join("delete"); + + fs::create_dir_all(&keep_dir).unwrap(); + fs::create_dir_all(&delete_dir).unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&data_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + protect: vec![CleanupTargetDto { + // Protect the keep_dir itself using Recursive mode to protect its descendants too + path: path_to_string(&keep_dir), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: Some(ProtectionModeDto::Recursive), + scope: None, + label: Some("protect-keep".to_string()), + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + // delete_dir should be deleted, keep_dir should NOT be deleted (protected by Directory target) + assert!(plan.dirs_to_delete.contains(&path_to_string(&delete_dir))); + assert!(!plan.dirs_to_delete.contains(&path_to_string(&keep_dir))); + // keep_dir is protected, so attempting to delete it is a violation + assert!(plan + .violations + .iter() + .any(|v| v.target_path == path_to_string(&keep_dir))); + } + + #[test] + fn batched_glob_planner_respects_exclude_basenames() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let build_dir = workspace_dir.join("build"); + let release_dir = build_dir.join("release"); + let debug_dir = build_dir.join("debug"); + let keep_dir = build_dir.join(".gitkeep"); + + fs::create_dir_all(&release_dir).unwrap(); + fs::create_dir_all(&debug_dir).unwrap(); + fs::create_dir_all(&keep_dir).unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&build_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: vec![".gitkeep".to_string()], + protection_mode: None, + scope: None, + label: Some("build-cleanup".to_string()), + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + // Should delete release and debug, but not .gitkeep + assert!(plan.dirs_to_delete.contains(&path_to_string(&release_dir))); + assert!(plan.dirs_to_delete.contains(&path_to_string(&debug_dir))); + assert!(!plan.dirs_to_delete.contains(&path_to_string(&keep_dir))); + } + + #[test] + fn batched_glob_planner_produces_stable_sorted_output() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let outputs_dir = workspace_dir.join("outputs"); + + // Create directories in non-alphabetical order + let dirs = vec!["zeta", "alpha", "beta", "gamma", "delta"]; + for dir in &dirs { + fs::create_dir_all(outputs_dir.join(dir)).unwrap(); + } + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&outputs_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + + // Verify output is sorted + let expected_order: Vec = dirs + .iter() + .map(|d| path_to_string(&outputs_dir.join(d))) + .collect::>() + .into_iter() + .collect::>() + .into_iter() + .collect(); + + assert_eq!(plan.dirs_to_delete, expected_order); + + // Run multiple times to ensure stability + for _ in 0..3 { + let plan2 = plan_cleanup(single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&outputs_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + )) + .unwrap(); + assert_eq!(plan.dirs_to_delete, plan2.dirs_to_delete); + } + } + + #[test] + fn batched_glob_planner_handles_file_vs_directory_classification() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let mixed_dir = workspace_dir.join("mixed"); + let file_path = mixed_dir.join("file.txt"); + let dir_path = mixed_dir.join("subdir"); + + fs::create_dir_all(&dir_path).unwrap(); + fs::write(&file_path, "content").unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&mixed_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + + // Files should be in files_to_delete, dirs in dirs_to_delete + assert!(plan.files_to_delete.contains(&path_to_string(&file_path))); + assert!(plan.dirs_to_delete.contains(&path_to_string(&dir_path))); + } + + #[test] + fn batched_glob_planner_handles_cross_plugin_glob_batching() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let project_a = workspace_dir.join("project-a/temp"); + let project_b = workspace_dir.join("project-b/temp"); + + fs::create_dir_all(project_a.join("old")).unwrap(); + fs::create_dir_all(project_b.join("cache")).unwrap(); + + // Multi-plugin snapshot to test cross-plugin batching + let snapshot = CleanupSnapshot { + workspace_dir: path_to_string(&workspace_dir), + aindex_dir: Some(path_to_string(&workspace_dir.join("aindex"))), + project_roots: vec![ + path_to_string(&workspace_dir.join("project-a")), + path_to_string(&workspace_dir.join("project-b")), + ], + protected_rules: Vec::new(), + plugin_snapshots: vec![ + PluginCleanupSnapshotDto { + plugin_name: "PluginA".to_string(), + outputs: vec![], + cleanup: CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&project_a.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + }, + PluginCleanupSnapshotDto { + plugin_name: "PluginB".to_string(), + outputs: vec![], + cleanup: CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&project_b.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + }, + ], + empty_dir_exclude_globs: Vec::new(), + }; + + let plan = plan_cleanup(snapshot).unwrap(); + + // Both plugins' globs should be resolved + assert_eq!(plan.dirs_to_delete.len(), 2); + assert!(plan + .dirs_to_delete + .contains(&path_to_string(&project_a.join("old")))); + assert!(plan + .dirs_to_delete + .contains(&path_to_string(&project_b.join("cache")))); + } +} diff --git a/sdk/src/core/config/mod.rs b/sdk/src/core/config/mod.rs new file mode 100644 index 00000000..6a1181b8 --- /dev/null +++ b/sdk/src/core/config/mod.rs @@ -0,0 +1,1513 @@ +#![deny(clippy::all)] + +//! Configuration loading, merging, and validation. +//! +//! Reads only `~/.aindex/.tnmsc.json` (global), +//! then merges with defaults. + +pub mod series_filter; + +use std::collections::HashMap; +use std::env; +use std::fs; +use std::path::{Path, PathBuf}; + +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +use crate::diagnostic_helpers::{diagnostic, line, optional_details}; +use tnmsc_logger::{Logger, create_logger}; + +// --------------------------------------------------------------------------- +// Constants +// --------------------------------------------------------------------------- + +pub const DEFAULT_CONFIG_FILE_NAME: &str = ".tnmsc.json"; +pub const DEFAULT_GLOBAL_CONFIG_DIR: &str = ".aindex"; +pub const DEFAULT_WSL_WINDOWS_USERS_ROOT: &str = "/mnt/c/Users"; + +fn path_details(path: &Path) -> Option> { + optional_details(serde_json::json!({ + "path": path.to_string_lossy() + })) +} + +fn path_error_details(path: &Path, error: &str) -> Option> { + optional_details(serde_json::json!({ + "path": path.to_string_lossy(), + "error": error + })) +} + +// --------------------------------------------------------------------------- +// Types — mirrors TS ConfigTypes.schema.ts +// --------------------------------------------------------------------------- + +/// A source/dist path pair. Both paths are relative to the aindex project root. +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +pub struct DirPair { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub src: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub dist: Option, +} + +impl DirPair { + fn merge(a: &Option, b: &Option) -> Option { + match (a, b) { + (None, None) => None, + (Some(v), None) => Some(v.clone()), + (None, Some(v)) => Some(v.clone()), + (Some(base), Some(over)) => Some(DirPair { + src: over.src.clone().or_else(|| base.src.clone()), + dist: over.dist.clone().or_else(|| base.dist.clone()), + }), + } + } +} + +/// Aindex configuration. +/// All paths are relative to `/`. +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct AindexConfig { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub dir: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub skills: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub commands: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub sub_agents: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub rules: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub global_prompt: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub workspace_prompt: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub app: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub ext: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub arch: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub softwares: Option, +} + +/// Per-plugin fast command series override options. +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct FastCommandSeriesPluginOverride { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub include_series_prefix: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub series_separator: Option, +} + +/// Fast command series configuration options. +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct FastCommandSeriesOptions { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub include_series_prefix: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub plugin_overrides: Option>, +} + +/// User profile information. Supports arbitrary key-value pairs. +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +pub struct UserProfile { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub name: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub username: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub gender: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub birthday: Option, + #[serde(flatten)] + pub extra: HashMap, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(untagged)] +pub enum StringOrStrings { + Single(String), + Multiple(Vec), +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct WindowsWsl2Options { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub instances: Option, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct WindowsOptions { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub wsl2: Option, +} + +/// User configuration file (.tnmsc.json). +/// All fields are optional — missing fields use default values. +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct UserConfigFile { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub version: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub workspace_dir: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub aindex: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub log_level: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub fast_command_series_options: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub profile: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub windows: Option, +} + +// --------------------------------------------------------------------------- +// Result types +// --------------------------------------------------------------------------- + +/// Result of loading a single config file. +#[derive(Debug, Clone)] +pub struct ConfigLoadResult { + pub config: UserConfigFile, + pub source: Option, + pub found: bool, +} + +/// Result of loading and merging all configurations. +#[derive(Debug, Clone)] +pub struct MergedConfigResult { + pub config: UserConfigFile, + pub sources: Vec, + pub found: bool, +} + +/// Validation result for global config. +#[derive(Debug, Clone)] +pub struct GlobalConfigValidationResult { + pub valid: bool, + pub exists: bool, + pub errors: Vec, + pub should_exit: bool, +} + +// --------------------------------------------------------------------------- +// Path helpers +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Default)] +pub struct RuntimeEnvironmentContext { + pub is_wsl: bool, + pub native_home_dir: Option, + pub effective_home_dir: Option, + pub selected_global_config_path: Option, + pub windows_users_root: PathBuf, +} + +fn home_dir() -> Option { + dirs::home_dir() +} + +fn normalize_posix_like_path(raw_path: &str) -> String { + let replaced = raw_path.replace('\\', "/"); + let has_root = replaced.starts_with('/'); + let mut components: Vec<&str> = Vec::new(); + + for component in replaced.split('/') { + if component.is_empty() || component == "." { + continue; + } + + if component == ".." { + if let Some(last_component) = components.last() + && *last_component != ".." + { + components.pop(); + continue; + } + + if !has_root { + components.push(component); + } + continue; + } + + components.push(component); + } + + let joined = components.join("/"); + if has_root { + if joined.is_empty() { + "/".to_string() + } else { + format!("/{joined}") + } + } else { + joined + } +} + +fn is_same_or_child_path(candidate_path: &str, parent_path: &str) -> bool { + let normalized_candidate = normalize_posix_like_path(candidate_path); + let normalized_parent = normalize_posix_like_path(parent_path); + + normalized_candidate == normalized_parent + || normalized_candidate.starts_with(&format!("{normalized_parent}/")) +} + +fn convert_windows_path_to_wsl(raw_path: &str) -> Option { + let bytes = raw_path.as_bytes(); + if bytes.len() < 3 + || !bytes[0].is_ascii_alphabetic() + || bytes[1] != b':' + || (bytes[2] != b'\\' && bytes[2] != b'/') + { + return None; + } + + let drive_letter = char::from(bytes[0]).to_ascii_lowercase(); + let relative_path = raw_path[2..] + .trim_start_matches(['\\', '/']) + .replace('\\', "/"); + let base_path = format!("/mnt/{drive_letter}"); + + if relative_path.is_empty() { + Some(PathBuf::from(base_path)) + } else { + Some(Path::new(&base_path).join(relative_path)) + } +} + +fn resolve_wsl_host_home_candidate(users_root: &Path, raw_path: Option<&str>) -> Option { + let raw_path = raw_path?.trim(); + if raw_path.is_empty() { + return None; + } + + let normalized_users_root = normalize_posix_like_path(&users_root.to_string_lossy()); + let candidate_paths = [ + convert_windows_path_to_wsl(raw_path) + .map(|candidate_path| normalize_posix_like_path(&candidate_path.to_string_lossy())), + Some(normalize_posix_like_path(raw_path)), + ]; + + for candidate_path in candidate_paths.into_iter().flatten() { + if is_same_or_child_path(&candidate_path, &normalized_users_root) { + return Some(PathBuf::from(candidate_path)); + } + } + + None +} + +fn resolve_preferred_wsl_host_home_dirs_for( + users_root: &Path, + userprofile: Option<&str>, + homedrive: Option<&str>, + homepath: Option<&str>, + home: Option<&str>, +) -> Vec { + let mut preferred_home_dirs: Vec = Vec::new(); + let combined_home_path = match (homedrive, homepath) { + (Some(drive), Some(home_path)) if !drive.is_empty() && !home_path.is_empty() => { + Some(format!("{drive}{home_path}")) + } + _ => None, + }; + + for candidate in [ + resolve_wsl_host_home_candidate(users_root, userprofile), + resolve_wsl_host_home_candidate(users_root, combined_home_path.as_deref()), + resolve_wsl_host_home_candidate(users_root, home), + ] + .into_iter() + .flatten() + { + if !preferred_home_dirs + .iter() + .any(|existing| existing == &candidate) + { + preferred_home_dirs.push(candidate); + } + } + + preferred_home_dirs +} + +fn non_empty_env_var(name: &str) -> Option { + env::var(name).ok().filter(|value| !value.is_empty()) +} + +fn resolve_preferred_wsl_host_home_dirs_with_root(users_root: &Path) -> Vec { + let userprofile = non_empty_env_var("USERPROFILE"); + let homedrive = non_empty_env_var("HOMEDRIVE"); + let homepath = non_empty_env_var("HOMEPATH"); + let home = non_empty_env_var("HOME"); + + resolve_preferred_wsl_host_home_dirs_for( + users_root, + userprofile.as_deref(), + homedrive.as_deref(), + homepath.as_deref(), + home.as_deref(), + ) +} + +fn global_config_home_dir(candidate_path: &Path) -> Option { + candidate_path + .parent() + .and_then(|parent| parent.parent()) + .map(PathBuf::from) +} + +fn select_wsl_host_global_config_path_for( + users_root: &Path, + userprofile: Option<&str>, + homedrive: Option<&str>, + homepath: Option<&str>, + home: Option<&str>, +) -> Option { + let candidates = find_wsl_host_global_config_paths_with_root(users_root); + let preferred_home_dirs = resolve_preferred_wsl_host_home_dirs_for( + users_root, + userprofile, + homedrive, + homepath, + home, + ); + + if !preferred_home_dirs.is_empty() { + for preferred_home_dir in preferred_home_dirs { + if let Some(candidate_path) = candidates.iter().find(|candidate_path| { + global_config_home_dir(candidate_path).as_ref() == Some(&preferred_home_dir) + }) { + return Some(candidate_path.clone()); + } + } + + return None; + } + + if candidates.len() == 1 { + return candidates.into_iter().next(); + } + + None +} + +fn select_wsl_host_global_config_path_with_root(users_root: &Path) -> Option { + let userprofile = non_empty_env_var("USERPROFILE"); + let homedrive = non_empty_env_var("HOMEDRIVE"); + let homepath = non_empty_env_var("HOMEPATH"); + let home = non_empty_env_var("HOME"); + + select_wsl_host_global_config_path_for( + users_root, + userprofile.as_deref(), + homedrive.as_deref(), + homepath.as_deref(), + home.as_deref(), + ) +} + +fn build_required_wsl_config_resolution_error(users_root: &Path) -> String { + let preferred_home_dirs = resolve_preferred_wsl_host_home_dirs_with_root(users_root); + let candidates = find_wsl_host_global_config_paths_with_root(users_root); + let config_lookup_pattern = format!( + "\"{}/*/{}/{}\"", + users_root.to_string_lossy(), + DEFAULT_GLOBAL_CONFIG_DIR, + DEFAULT_CONFIG_FILE_NAME + ); + + if candidates.is_empty() { + return format!("WSL host config file not found under {config_lookup_pattern}."); + } + + if !preferred_home_dirs.is_empty() { + return format!( + "WSL host config file for the current Windows user was not found under {config_lookup_pattern}." + ); + } + + format!( + "WSL host config file could not be matched to the current Windows user under {config_lookup_pattern}." + ) +} + +fn is_wsl_runtime_for( + os_name: &str, + wsl_distro_name: Option<&str>, + wsl_interop: Option<&str>, + release: &str, +) -> bool { + if os_name != "linux" { + return false; + } + + if wsl_distro_name.is_some_and(|value| !value.is_empty()) + || wsl_interop.is_some_and(|value| !value.is_empty()) + { + return true; + } + + release.to_lowercase().contains("microsoft") +} + +pub fn is_wsl_runtime() -> bool { + let release = fs::read_to_string("/proc/sys/kernel/osrelease").unwrap_or_default(); + let wsl_distro_name = env::var("WSL_DISTRO_NAME").ok(); + let wsl_interop = env::var("WSL_INTEROP").ok(); + + is_wsl_runtime_for( + env::consts::OS, + wsl_distro_name.as_deref(), + wsl_interop.as_deref(), + &release, + ) +} + +pub fn find_wsl_host_global_config_paths_with_root(users_root: &Path) -> Vec { + if !users_root.is_dir() { + return vec![]; + } + + let mut candidates: Vec = match fs::read_dir(users_root) { + Ok(entries) => entries + .filter_map(|entry| entry.ok()) + .filter_map(|entry| { + let entry_path = entry.path(); + if !entry_path.is_dir() { + return None; + } + + let candidate_path = entry_path + .join(DEFAULT_GLOBAL_CONFIG_DIR) + .join(DEFAULT_CONFIG_FILE_NAME); + if candidate_path.is_file() { + Some(candidate_path) + } else { + None + } + }) + .collect(), + Err(_) => vec![], + }; + + candidates.sort_by(|a, b| a.to_string_lossy().cmp(&b.to_string_lossy())); + candidates +} + +pub fn resolve_runtime_environment_with_root(users_root: PathBuf) -> RuntimeEnvironmentContext { + let native_home_dir = home_dir(); + let is_wsl = is_wsl_runtime(); + let selected_global_config_path = if is_wsl { + select_wsl_host_global_config_path_with_root(&users_root) + } else { + None + }; + let effective_home_dir = selected_global_config_path + .as_ref() + .and_then(|config_path| config_path.parent().and_then(|parent| parent.parent())) + .map(PathBuf::from) + .or_else(|| native_home_dir.clone()); + + RuntimeEnvironmentContext { + is_wsl, + native_home_dir, + effective_home_dir, + selected_global_config_path, + windows_users_root: users_root, + } +} + +pub fn resolve_runtime_environment() -> RuntimeEnvironmentContext { + resolve_runtime_environment_with_root(PathBuf::from(DEFAULT_WSL_WINDOWS_USERS_ROOT)) +} + +/// Resolve `~` prefix to the user's home directory. +pub fn resolve_tilde(p: &str) -> PathBuf { + let runtime_environment = resolve_runtime_environment(); + if let Some(rest) = p.strip_prefix('~') + && let Some(home) = runtime_environment + .effective_home_dir + .or(runtime_environment.native_home_dir) + { + let rest = rest + .strip_prefix('/') + .or_else(|| rest.strip_prefix('\\')) + .unwrap_or(rest); + return home.join(rest); + } + PathBuf::from(p) +} + +/// Get the global config file path: `~/.aindex/.tnmsc.json` +pub fn get_global_config_path() -> PathBuf { + let runtime_environment = resolve_runtime_environment(); + + if let Some(selected_path) = runtime_environment.selected_global_config_path { + return selected_path; + } + + match runtime_environment + .effective_home_dir + .or(runtime_environment.native_home_dir) + { + Some(home) => home + .join(DEFAULT_GLOBAL_CONFIG_DIR) + .join(DEFAULT_CONFIG_FILE_NAME), + None => PathBuf::from(DEFAULT_GLOBAL_CONFIG_DIR).join(DEFAULT_CONFIG_FILE_NAME), + } +} + +pub fn get_required_global_config_path() -> Result { + let runtime_environment = resolve_runtime_environment(); + + if runtime_environment.is_wsl && runtime_environment.selected_global_config_path.is_none() { + return Err(build_required_wsl_config_resolution_error( + &runtime_environment.windows_users_root, + )); + } + + Ok(get_global_config_path()) +} + +// --------------------------------------------------------------------------- +// Merge logic +// --------------------------------------------------------------------------- + +fn merge_aindex(a: &Option, b: &Option) -> Option { + match (a, b) { + (None, None) => None, + (Some(v), None) => Some(v.clone()), + (None, Some(v)) => Some(v.clone()), + (Some(base), Some(over)) => Some(AindexConfig { + dir: over.dir.clone().or_else(|| base.dir.clone()), + skills: DirPair::merge(&base.skills, &over.skills), + commands: DirPair::merge(&base.commands, &over.commands), + sub_agents: DirPair::merge(&base.sub_agents, &over.sub_agents), + rules: DirPair::merge(&base.rules, &over.rules), + global_prompt: DirPair::merge(&base.global_prompt, &over.global_prompt), + workspace_prompt: DirPair::merge(&base.workspace_prompt, &over.workspace_prompt), + app: DirPair::merge(&base.app, &over.app), + ext: DirPair::merge(&base.ext, &over.ext), + arch: DirPair::merge(&base.arch, &over.arch), + softwares: DirPair::merge(&base.softwares, &over.softwares), + }), + } +} + +fn merge_windows(a: &Option, b: &Option) -> Option { + match (a, b) { + (None, None) => None, + (Some(v), None) => Some(v.clone()), + (None, Some(v)) => Some(v.clone()), + (Some(base), Some(over)) => Some(WindowsOptions { + wsl2: match (&base.wsl2, &over.wsl2) { + (None, None) => None, + (Some(v), None) => Some(v.clone()), + (None, Some(v)) => Some(v.clone()), + (Some(base_wsl2), Some(over_wsl2)) => Some(WindowsWsl2Options { + instances: over_wsl2 + .instances + .clone() + .or_else(|| base_wsl2.instances.clone()), + }), + }, + }), + } +} + +/// Merge two configs. `over` fields take priority over `base`. +pub fn merge_configs_pair(base: &UserConfigFile, over: &UserConfigFile) -> UserConfigFile { + let merged_aindex = merge_aindex(&base.aindex, &over.aindex); + let merged_windows = merge_windows(&base.windows, &over.windows); + + UserConfigFile { + version: over.version.clone().or_else(|| base.version.clone()), + workspace_dir: over + .workspace_dir + .clone() + .or_else(|| base.workspace_dir.clone()), + aindex: merged_aindex, + log_level: over.log_level.clone().or_else(|| base.log_level.clone()), + fast_command_series_options: over + .fast_command_series_options + .clone() + .or_else(|| base.fast_command_series_options.clone()), + profile: over.profile.clone().or_else(|| base.profile.clone()), + windows: merged_windows, + } +} + +/// Merge a list of configs. First has highest priority, last has lowest. +fn merge_configs(configs: &[UserConfigFile]) -> UserConfigFile { + if configs.is_empty() { + return UserConfigFile::default(); + } + if configs.len() == 1 { + return configs[0].clone(); + } + // Reverse: merge from lowest to highest priority + let mut result = UserConfigFile::default(); + for config in configs.iter().rev() { + result = merge_configs_pair(&result, config); + } + result +} + +// --------------------------------------------------------------------------- +// ConfigLoader +// --------------------------------------------------------------------------- + +/// Options for ConfigLoader. +#[derive(Debug, Clone, Default)] +pub struct ConfigLoaderOptions {} + +/// ConfigLoader handles discovery and loading of user configuration files. +/// +/// The config source is fixed and unambiguous: +/// 1. Global: `~/.aindex/.tnmsc.json` +pub struct ConfigLoader { + logger: Logger, +} + +impl ConfigLoader { + pub fn new(_options: ConfigLoaderOptions) -> Self { + Self { + logger: create_logger("ConfigLoader", None), + } + } + + pub fn with_defaults() -> Self { + Self::new(ConfigLoaderOptions::default()) + } + + pub fn try_get_search_paths(&self, _cwd: &Path) -> Result, String> { + let runtime_environment = resolve_runtime_environment(); + + if runtime_environment.is_wsl { + self.logger.info( + Value::String("wsl environment detected".into()), + Some(serde_json::json!({ + "effectiveHomeDir": runtime_environment + .effective_home_dir + .as_ref() + .map(|path| path.to_string_lossy().into_owned()) + })), + ); + } + + let config_path = get_required_global_config_path()?; + if runtime_environment.is_wsl { + self.logger.info( + Value::String("using wsl host global config".into()), + Some(serde_json::json!({ + "path": config_path.to_string_lossy() + })), + ); + } + + Ok(vec![config_path]) + } + + /// Get the list of config file paths to search. + pub fn get_search_paths(&self, _cwd: &Path) -> Vec { + vec![get_global_config_path()] + } + + /// Load a single config file. + pub fn load_from_file(&self, file_path: &Path) -> ConfigLoadResult { + let resolved = if file_path.starts_with("~") { + resolve_tilde(&file_path.to_string_lossy()) + } else { + file_path.to_path_buf() + }; + + if !resolved.exists() { + return ConfigLoadResult { + config: UserConfigFile::default(), + source: None, + found: false, + }; + } + + match fs::read_to_string(&resolved) { + Ok(content) => match self.parse_config(&content, &resolved) { + Ok(config) => { + self.logger.debug( + Value::String("loaded".into()), + Some(serde_json::json!({"source": resolved.to_string_lossy()})), + ); + ConfigLoadResult { + config, + source: Some(resolved.to_string_lossy().into_owned()), + found: true, + } + } + Err(_) => ConfigLoadResult { + config: UserConfigFile::default(), + source: None, + found: false, + }, + }, + Err(e) => { + self.logger.warn(diagnostic( + "CONFIG_FILE_LOAD_FAILED", + "Config file could not be loaded", + line("The config file exists but could not be read, so it was skipped."), + Some(line( + "Check that the file exists, is readable, and is not locked.", + )), + None, + path_error_details(&resolved, &e.to_string()), + )); + ConfigLoadResult { + config: UserConfigFile::default(), + source: None, + found: false, + } + } + } + } + + pub fn try_load(&self, cwd: &Path) -> Result { + let search_paths = self.try_get_search_paths(cwd)?; + let mut loaded: Vec = Vec::new(); + + for path in &search_paths { + let result = self.load_from_file(path); + if result.found { + loaded.push(result); + } + } + + let configs: Vec = loaded.iter().map(|r| r.config.clone()).collect(); + let merged = merge_configs(&configs); + let sources: Vec = loaded.iter().filter_map(|r| r.source.clone()).collect(); + + Ok(MergedConfigResult { + config: merged, + sources, + found: !loaded.is_empty(), + }) + } + + /// Load and merge all config files. + pub fn load(&self, cwd: &Path) -> MergedConfigResult { + self.try_load(cwd).unwrap_or_else(|error| { + self.logger.error(diagnostic( + "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", + "Failed to resolve the global config path", + line("The runtime could not determine which global config file should be loaded."), + Some(line( + "Ensure the expected global config exists and retry the command.", + )), + None, + optional_details(serde_json::json!({ "error": error })), + )); + + MergedConfigResult { + config: UserConfigFile::default(), + sources: vec![], + found: false, + } + }) + } + + fn parse_config(&self, content: &str, file_path: &Path) -> Result { + let parsed: Value = serde_json::from_str(content) + .map_err(|e| format!("Invalid JSON in {}: {}", file_path.display(), e))?; + + if !parsed.is_object() { + return Err(format!( + "Config must be a JSON object in {}", + file_path.display() + )); + } + + // Deserialize with serde — invalid fields are silently ignored (like Zod's safeParse) + match serde_json::from_value::(parsed.clone()) { + Ok(config) => Ok(config), + Err(e) => { + self.logger.warn(diagnostic( + "CONFIG_FILE_VALIDATION_WARNING", + "Config contains invalid fields", + line("One or more config fields could not be deserialized, so defaults were used."), + Some(line("Fix the field types in the config file and retry.")), + None, + path_error_details(file_path, &e.to_string()), + )); + // Fallback: try to extract what we can + Ok( + serde_json::from_value::(Value::Object(Default::default())) + .unwrap_or_default(), + ) + } + } + } +} + +// --------------------------------------------------------------------------- +// Convenience functions +// --------------------------------------------------------------------------- + +/// Load user configuration using default loader. +pub fn load_user_config(cwd: &Path) -> Result { + ConfigLoader::with_defaults().try_load(cwd) +} + +// --------------------------------------------------------------------------- +// Config file management +// --------------------------------------------------------------------------- + +/// Write a config file with pretty JSON formatting. +pub fn write_config(path: &Path, config: &UserConfigFile, logger: &Logger) { + if let Some(parent) = path.parent() + && !parent.exists() + { + let _ = fs::create_dir_all(parent); + } + + match serde_json::to_string_pretty(config) { + Ok(json) => { + let content = format!("{}\n", json); + match fs::write(path, content) { + Ok(()) => { + logger.info( + Value::String("global config created".into()), + Some(serde_json::json!({"path": path.to_string_lossy()})), + ); + } + Err(e) => { + logger.warn(diagnostic( + "CONFIG_WRITE_FAILED", + "Failed to write the config file", + line("The CLI generated config JSON but could not write it to disk."), + Some(line( + "Check that the destination directory is writable and retry.", + )), + None, + path_error_details(path, &e.to_string()), + )); + } + } + } + Err(e) => { + logger.warn(diagnostic( + "CONFIG_SERIALIZATION_FAILED", + "Failed to serialize the config file", + line("The config object could not be converted to JSON."), + None, + None, + optional_details(serde_json::json!({ "error": e.to_string() })), + )); + } + } +} + +/// Validate global config file strictly. +/// +/// - If config doesn't exist: create default config, log warn, continue +/// - If config is invalid: preserve the file, log error, return should_exit=true +pub fn validate_and_ensure_global_config( + default_config: &UserConfigFile, +) -> GlobalConfigValidationResult { + let logger = create_logger("ConfigLoader", None); + let config_path = match get_required_global_config_path() { + Ok(path) => path, + Err(error) => { + logger.error(diagnostic( + "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", + "Failed to resolve the global config path", + line("The runtime could not determine the expected global config file location."), + Some(line( + "Ensure the required host config exists before retrying tnmsc.", + )), + None, + optional_details(serde_json::json!({ "error": error })), + )); + return GlobalConfigValidationResult { + valid: false, + exists: false, + errors: vec![error], + should_exit: true, + }; + } + }; + + if !config_path.exists() { + logger.warn(diagnostic( + "GLOBAL_CONFIG_MISSING_DEFAULT_CREATED", + "Global config was missing", + line("No global config file exists at the expected path, so a default file will be created."), + Some(line("Review the generated config if you need custom settings.")), + None, + path_details(&config_path), + )); + write_config(&config_path, default_config, &logger); + return GlobalConfigValidationResult { + valid: true, + exists: false, + errors: vec![], + should_exit: false, + }; + } + + // Try to read + let content = match fs::read_to_string(&config_path) { + Ok(c) => c, + Err(e) => { + let msg = format!("Failed to read config: {}", e); + logger.error(diagnostic( + "GLOBAL_CONFIG_READ_FAILED", + "Failed to read the global config", + line("The global config file exists but could not be read."), + Some(line( + "Check file permissions and confirm the path points to a readable file.", + )), + None, + path_error_details(&config_path, &e.to_string()), + )); + return preserve_invalid_config_and_exit(&config_path, &logger, vec![msg]); + } + }; + + // Try to parse JSON + let parsed: Value = match serde_json::from_str(&content) { + Ok(v) => v, + Err(e) => { + let msg = format!("Invalid JSON: {}", e); + logger.error(diagnostic( + "GLOBAL_CONFIG_INVALID_JSON", + "Global config contains invalid JSON", + line("The global config file is not valid JSON."), + Some(line("Fix the JSON syntax in the config file and retry.")), + None, + path_error_details(&config_path, &e.to_string()), + )); + return preserve_invalid_config_and_exit(&config_path, &logger, vec![msg]); + } + }; + + // Must be an object + if !parsed.is_object() { + logger.error(diagnostic( + "GLOBAL_CONFIG_NOT_OBJECT", + "Global config must be a JSON object", + line( + "The global config parsed successfully, but its top-level value is not an object.", + ), + Some(line( + "Replace the top-level JSON value with an object like `{}` and retry.", + )), + None, + path_details(&config_path), + )); + return preserve_invalid_config_and_exit( + &config_path, + &logger, + vec!["Config must be a JSON object".into()], + ); + } + + // Try to deserialize + if let Err(e) = serde_json::from_value::(parsed) { + let msg = format!("Config validation error: {}", e); + logger.error(diagnostic( + "GLOBAL_CONFIG_VALIDATION_FAILED", + "Global config failed schema validation", + line("The JSON shape does not match the expected config schema."), + Some(line( + "Fix the invalid field types or names in the config file and retry.", + )), + None, + path_error_details(&config_path, &e.to_string()), + )); + return preserve_invalid_config_and_exit(&config_path, &logger, vec![msg]); + } + + GlobalConfigValidationResult { + valid: true, + exists: true, + errors: vec![], + should_exit: false, + } +} + +fn preserve_invalid_config_and_exit( + config_path: &Path, + logger: &Logger, + errors: Vec, +) -> GlobalConfigValidationResult { + logger.error(diagnostic( + "GLOBAL_CONFIG_PRESERVED", + "Invalid global config was preserved", + line("The CLI stopped rather than overwriting the invalid global config."), + Some(line( + "Fix the file at the reported path and restart the command.", + )), + None, + path_details(config_path), + )); + + GlobalConfigValidationResult { + valid: false, + exists: true, + errors, + should_exit: true, + } +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_resolve_tilde() { + let resolved = resolve_tilde("~/test/path"); + if let Some(home) = home_dir() { + assert_eq!(resolved, home.join("test").join("path")); + } + } + + #[test] + fn test_resolve_tilde_no_tilde() { + let resolved = resolve_tilde("/absolute/path"); + assert_eq!(resolved, PathBuf::from("/absolute/path")); + } + + #[test] + fn test_user_config_file_default() { + let config = UserConfigFile::default(); + assert!(config.version.is_none()); + assert!(config.workspace_dir.is_none()); + assert!(config.aindex.is_none()); + assert!(config.log_level.is_none()); + } + + #[test] + fn test_user_config_file_deserialize() { + let json = r#"{ + "workspaceDir": "~/myworkspace", + "logLevel": "debug" + }"#; + let config: UserConfigFile = serde_json::from_str(json).unwrap(); + assert_eq!(config.workspace_dir.as_deref(), Some("~/myworkspace")); + assert_eq!(config.log_level.as_deref(), Some("debug")); + } + + #[test] + fn test_user_config_file_deserialize_with_aindex() { + let json = r#"{ + "aindex": { + "skills": {"src": "src/skills", "dist": "dist/skills"}, + "commands": {"src": "src/commands", "dist": "dist/commands"}, + "subAgents": {"src": "src/agents", "dist": "dist/agents"}, + "rules": {"src": "src/rules", "dist": "dist/rules"}, + "globalPrompt": {"src": "global.src.mdx", "dist": "dist/global.mdx"}, + "workspacePrompt": {"src": "workspace.src.mdx", "dist": "dist/workspace.mdx"}, + "app": {"src": "app", "dist": "dist/app"}, + "ext": {"src": "ext", "dist": "dist/ext"}, + "arch": {"src": "arch", "dist": "dist/arch"}, + "softwares": {"src": "softwares", "dist": "dist/softwares"} + } + }"#; + let config: UserConfigFile = serde_json::from_str(json).unwrap(); + let aindex = config.aindex.unwrap(); + assert_eq!( + aindex.skills.as_ref().unwrap().src.as_deref(), + Some("src/skills") + ); + assert_eq!( + aindex.commands.as_ref().unwrap().src.as_deref(), + Some("src/commands") + ); + } + + #[test] + fn test_user_config_file_deserialize_with_profile() { + let json = r#"{ + "profile": { + "name": "Zhang San", + "username": "zhangsan", + "gender": "male", + "birthday": "1990-01-01", + "customField": "custom value" + } + }"#; + let config: UserConfigFile = serde_json::from_str(json).unwrap(); + let profile = config.profile.unwrap(); + assert_eq!(profile.name.as_deref(), Some("Zhang San")); + assert_eq!( + profile.extra.get("customField").and_then(|v| v.as_str()), + Some("custom value") + ); + } + + #[test] + fn test_user_config_file_deserialize_with_windows_wsl2_instances() { + let json = r#"{ + "windows": { + "wsl2": { + "instances": ["Ubuntu", "Debian"] + } + } + }"#; + let config: UserConfigFile = serde_json::from_str(json).unwrap(); + + match config + .windows + .and_then(|windows| windows.wsl2) + .and_then(|wsl2| wsl2.instances) + { + Some(StringOrStrings::Multiple(instances)) => { + assert_eq!(instances, vec!["Ubuntu".to_string(), "Debian".to_string()]); + } + other => panic!("expected windows.wsl2.instances array, got {:?}", other), + } + } + + #[test] + fn test_user_config_file_roundtrip() { + let config = UserConfigFile { + workspace_dir: Some("~/workspace".into()), + log_level: Some("info".into()), + ..Default::default() + }; + let json = serde_json::to_string(&config).unwrap(); + let parsed: UserConfigFile = serde_json::from_str(&json).unwrap(); + assert_eq!(config, parsed); + } + + #[test] + fn test_merge_configs_empty() { + let result = merge_configs(&[]); + assert_eq!(result, UserConfigFile::default()); + } + + #[test] + fn test_merge_configs_single() { + let config = UserConfigFile { + workspace_dir: Some("~/ws".into()), + ..Default::default() + }; + let result = merge_configs(std::slice::from_ref(&config)); + assert_eq!(result, config); + } + + #[test] + fn test_merge_configs_priority() { + let cwd_config = UserConfigFile { + workspace_dir: Some("~/cwd-workspace".into()), + log_level: Some("debug".into()), + ..Default::default() + }; + let global_config = UserConfigFile { + workspace_dir: Some("~/global-workspace".into()), + log_level: Some("info".into()), + aindex: Some(AindexConfig { + skills: Some(DirPair { + src: Some("global/skills".into()), + dist: Some("global/dist/skills".into()), + }), + ..Default::default() + }), + ..Default::default() + }; + + // cwd_config is first (highest priority) + let result = merge_configs(&[cwd_config, global_config]); + assert_eq!(result.workspace_dir.as_deref(), Some("~/cwd-workspace")); + assert_eq!(result.log_level.as_deref(), Some("debug")); + assert_eq!( + result + .aindex + .as_ref() + .and_then(|s| s.skills.as_ref()) + .and_then(|p| p.src.as_deref()), + Some("global/skills") + ); + } + + #[test] + fn test_merge_configs_merges_windows_options() { + let base_config = UserConfigFile { + windows: Some(WindowsOptions { + wsl2: Some(WindowsWsl2Options { + instances: Some(StringOrStrings::Single("Ubuntu".into())), + }), + }), + ..Default::default() + }; + let override_config = UserConfigFile { + log_level: Some("debug".into()), + ..Default::default() + }; + + let merged = merge_configs_pair(&base_config, &override_config); + match merged + .windows + .and_then(|windows| windows.wsl2) + .and_then(|wsl2| wsl2.instances) + { + Some(StringOrStrings::Single(instance)) => assert_eq!(instance, "Ubuntu"), + other => panic!( + "expected merged windows.wsl2.instances value, got {:?}", + other + ), + } + } + + #[test] + fn test_merge_aindex_deep() { + let cwd_config = UserConfigFile { + aindex: Some(AindexConfig { + skills: Some(DirPair { + src: Some("custom/skills".into()), + dist: Some("custom/dist/skills".into()), + }), + ..Default::default() + }), + ..Default::default() + }; + let global_config = UserConfigFile { + aindex: Some(AindexConfig { + skills: Some(DirPair { + src: Some("src/skills".into()), + dist: Some("dist/skills".into()), + }), + commands: Some(DirPair { + src: Some("src/commands".into()), + dist: Some("dist/commands".into()), + }), + ..Default::default() + }), + ..Default::default() + }; + + let result = merge_configs(&[cwd_config, global_config]); + let aindex = result.aindex.unwrap(); + assert_eq!( + aindex.skills.as_ref().unwrap().src.as_deref(), + Some("custom/skills") + ); + assert_eq!( + aindex.commands.as_ref().unwrap().src.as_deref(), + Some("src/commands") + ); + } + + #[test] + fn test_config_loader_search_paths() { + let loader = ConfigLoader::with_defaults(); + let cwd = PathBuf::from("/workspace/project"); + let paths = loader.get_search_paths(&cwd); + + assert_eq!(paths, vec![get_global_config_path()]); + } + + #[test] + fn test_find_wsl_host_global_config_paths_with_root_sorts_candidates() { + let temp_dir = TempDir::new().unwrap(); + let users_root = temp_dir.path().join("Users"); + let alpha_config_path = users_root.join("alpha").join(".aindex").join(".tnmsc.json"); + let bravo_config_path = users_root.join("bravo").join(".aindex").join(".tnmsc.json"); + + fs::create_dir_all(alpha_config_path.parent().unwrap()).unwrap(); + fs::create_dir_all(bravo_config_path.parent().unwrap()).unwrap(); + fs::write(&alpha_config_path, "{}\n").unwrap(); + fs::write(&bravo_config_path, "{}\n").unwrap(); + + let candidates = find_wsl_host_global_config_paths_with_root(&users_root); + assert_eq!(candidates, vec![alpha_config_path, bravo_config_path]); + } + + #[test] + fn test_select_wsl_host_global_config_path_for_prefers_matching_userprofile() { + let temp_dir = TempDir::new().unwrap(); + let users_root = temp_dir.path().join("Users"); + let alpha_config_path = users_root.join("alpha").join(".aindex").join(".tnmsc.json"); + let bravo_config_path = users_root.join("bravo").join(".aindex").join(".tnmsc.json"); + + fs::create_dir_all(alpha_config_path.parent().unwrap()).unwrap(); + fs::create_dir_all(bravo_config_path.parent().unwrap()).unwrap(); + fs::write(&alpha_config_path, "{}\n").unwrap(); + fs::write(&bravo_config_path, "{}\n").unwrap(); + + let selected = select_wsl_host_global_config_path_for( + &users_root, + Some(&users_root.join("bravo").to_string_lossy()), + None, + None, + None, + ); + + assert_eq!(selected, Some(bravo_config_path)); + } + + #[test] + fn test_select_wsl_host_global_config_path_for_rejects_other_windows_profile() { + let temp_dir = TempDir::new().unwrap(); + let users_root = temp_dir.path().join("Users"); + let alpha_config_path = users_root.join("alpha").join(".aindex").join(".tnmsc.json"); + + fs::create_dir_all(alpha_config_path.parent().unwrap()).unwrap(); + fs::write(&alpha_config_path, "{}\n").unwrap(); + + let selected = select_wsl_host_global_config_path_for( + &users_root, + Some(&users_root.join("bravo").to_string_lossy()), + None, + None, + None, + ); + + assert_eq!(selected, None); + } + + #[test] + fn test_is_wsl_runtime_for_detects_linux_wsl_inputs() { + assert!(is_wsl_runtime_for("linux", Some("Ubuntu"), None, "")); + assert!(is_wsl_runtime_for( + "linux", + None, + Some("/run/WSL/12_interop"), + "" + )); + assert!(is_wsl_runtime_for( + "linux", + None, + None, + "5.15.167.4-microsoft-standard-WSL2" + )); + assert!(!is_wsl_runtime_for("windows", Some("Ubuntu"), None, "")); + } + + #[test] + fn test_config_loader_load_nonexistent() { + let loader = ConfigLoader::with_defaults(); + let result = loader.load_from_file(Path::new("/nonexistent/.tnmsc.json")); + assert!(!result.found); + assert!(result.source.is_none()); + } + + #[test] + fn test_dir_pair_merge() { + let a = Some(DirPair { + src: Some("a-src".into()), + dist: Some("a-dist".into()), + }); + let b = Some(DirPair { + src: Some("b-src".into()), + dist: None, + }); + let merged = DirPair::merge(&a, &b).unwrap(); + assert_eq!(merged.src.as_deref(), Some("b-src")); + assert_eq!(merged.dist.as_deref(), Some("a-dist")); + } + + #[test] + fn test_global_config_path() { + let path = get_global_config_path(); + let path_str = path.to_string_lossy(); + assert!(path_str.contains(DEFAULT_GLOBAL_CONFIG_DIR)); + assert!(path_str.contains(DEFAULT_CONFIG_FILE_NAME)); + } + + #[test] + fn test_preserve_invalid_config_and_exit_keeps_original_file() { + let temp_dir = match TempDir::new() { + Ok(value) => value, + Err(error) => panic!("failed to create temp dir: {error}"), + }; + let config_path = temp_dir.path().join(DEFAULT_CONFIG_FILE_NAME); + let invalid_content = "{invalid-json"; + + if let Err(error) = fs::write(&config_path, invalid_content) { + panic!("failed to write invalid config fixture: {error}"); + } + + let logger = create_logger("ConfigLoaderTest", None); + let result = + preserve_invalid_config_and_exit(&config_path, &logger, vec!["Invalid JSON".into()]); + + assert!(!result.valid); + assert!(result.exists); + assert!(result.should_exit); + assert_eq!(result.errors, vec!["Invalid JSON".to_string()]); + + let retained = match fs::read_to_string(&config_path) { + Ok(value) => value, + Err(error) => panic!("failed to read retained config: {error}"), + }; + assert_eq!(retained, invalid_content); + } +} + +// =========================================================================== +// NAPI binding layer (only compiled with --features napi) +// =========================================================================== + +#[cfg(feature = "napi")] +mod napi_binding { + use super::*; + use napi_derive::napi; + + /// Load and merge user configuration from the given cwd directory. + /// Returns the merged config as a JSON string. + #[napi] + pub fn load_user_config(cwd: String) -> napi::Result { + let path = std::path::Path::new(&cwd); + let result = super::load_user_config(path).map_err(napi::Error::from_reason)?; + serde_json::to_string(&result.config).map_err(|e| napi::Error::from_reason(e.to_string())) + } + + /// Get the global config file path (~/.aindex/.tnmsc.json). + #[napi] + pub fn get_global_config_path_str() -> napi::Result { + get_required_global_config_path() + .map(|path| path.to_string_lossy().into_owned()) + .map_err(napi::Error::from_reason) + } + + /// Merge two config JSON strings. `over` fields take priority over `base`. + #[napi] + pub fn merge_configs(base_json: String, over_json: String) -> napi::Result { + let base: UserConfigFile = serde_json::from_str(&base_json) + .map_err(|e| napi::Error::from_reason(format!("base: {e}")))?; + let over: UserConfigFile = serde_json::from_str(&over_json) + .map_err(|e| napi::Error::from_reason(format!("over: {e}")))?; + let merged = merge_configs_pair(&base, &over); + serde_json::to_string(&merged).map_err(|e| napi::Error::from_reason(e.to_string())) + } + + /// Load config from a specific file path. Returns JSON string or null if not found. + #[napi] + pub fn load_config_from_file(file_path: String) -> napi::Result> { + let loader = ConfigLoader::with_defaults(); + let result = loader.load_from_file(std::path::Path::new(&file_path)); + if !result.found { + return Ok(None); + } + let json = serde_json::to_string(&result.config) + .map_err(|e| napi::Error::from_reason(e.to_string()))?; + Ok(Some(json)) + } +} diff --git a/sdk/src/core/config/series_filter.rs b/sdk/src/core/config/series_filter.rs new file mode 100644 index 00000000..0a7765ee --- /dev/null +++ b/sdk/src/core/config/series_filter.rs @@ -0,0 +1,228 @@ +//! Series-based filtering helpers (NAPI-exported). +//! +//! Mirrors the pure-TS implementations in `seriesFilter.ts`. +//! Each function is gated behind the `napi` feature so the crate +//! still compiles as a plain Rust library without Node bindings. + +use std::collections::{HashMap, HashSet}; + +// --------------------------------------------------------------------------- +// Core logic (always available) +// --------------------------------------------------------------------------- + +/// Compute the effective includeSeries as the set union of two optional arrays. +/// Returns an empty vec when both are `None` (no filtering — all items pass). +pub fn resolve_effective_include_series_core( + top_level: Option<&[String]>, + type_specific: Option<&[String]>, +) -> Vec { + match (top_level, type_specific) { + (None, None) => Vec::new(), + (Some(a), None) => a + .iter() + .collect::>() + .into_iter() + .cloned() + .collect(), + (None, Some(b)) => b + .iter() + .collect::>() + .into_iter() + .cloned() + .collect(), + (Some(a), Some(b)) => { + let mut set = HashSet::new(); + for s in a.iter().chain(b.iter()) { + set.insert(s.clone()); + } + set.into_iter().collect() + } + } +} + +/// Determine whether a prompt item should be included. +/// +/// - `None` seri_name → always included +/// - empty effective list → always included (no filtering configured) +/// - single string → included iff member of the list +/// - array → included iff any element intersects the list +pub fn matches_series_core( + seri_name: Option<&SeriName>, + effective_include_series: &[String], +) -> bool { + let seri = match seri_name { + None => return true, + Some(s) => s, + }; + if effective_include_series.is_empty() { + return true; + } + let set: HashSet<&str> = effective_include_series + .iter() + .map(String::as_str) + .collect(); + match seri { + SeriName::Single(s) => set.contains(s.as_str()), + SeriName::Multiple(arr) => arr.iter().any(|s| set.contains(s.as_str())), + } +} + +/// Deep-merge two optional subSeries records. +/// For each key present in either record the result is the set union of both +/// value arrays. Returns an empty map when both are `None`. +pub fn resolve_sub_series_core( + top_level: Option<&HashMap>>, + type_specific: Option<&HashMap>>, +) -> HashMap> { + match (top_level, type_specific) { + (None, None) => HashMap::new(), + (Some(a), None) => a.clone(), + (None, Some(b)) => b.clone(), + (Some(a), Some(b)) => { + let mut merged = a.clone(); + for (key, values) in b { + let entry = merged.entry(key.clone()).or_default(); + let mut set: HashSet = entry.drain(..).collect(); + for v in values { + set.insert(v.clone()); + } + *entry = set.into_iter().collect(); + } + merged + } + } +} + +/// Wrapper enum for the `seriName` parameter (string or string array). +pub enum SeriName { + Single(String), + Multiple(Vec), +} + +// --------------------------------------------------------------------------- +// NAPI binding layer +// --------------------------------------------------------------------------- + +#[cfg(feature = "napi")] +mod napi_binding { + use std::collections::HashMap; + + use napi::Either; + use napi_derive::napi; + + use super::*; + + /// Determine whether a prompt item should be included based on its + /// `seriName` and the effective `includeSeries` list. + #[napi] + pub fn matches_series( + seri_name: Option>>, + effective_include_series: Vec, + ) -> bool { + let seri = seri_name.map(|e| match e { + Either::A(s) => SeriName::Single(s), + Either::B(arr) => SeriName::Multiple(arr), + }); + matches_series_core(seri.as_ref(), &effective_include_series) + } + + /// Compute the effective includeSeries as the set union of top-level and + /// type-specific arrays. + #[napi] + pub fn resolve_effective_include_series( + top_level: Option>, + type_specific: Option>, + ) -> Vec { + resolve_effective_include_series_core(top_level.as_deref(), type_specific.as_deref()) + } + + /// Deep-merge two optional subSeries records. + #[napi] + pub fn resolve_sub_series( + top_level: Option>>, + type_specific: Option>>, + ) -> HashMap> { + resolve_sub_series_core(top_level.as_ref(), type_specific.as_ref()) + } +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_resolve_effective_both_none() { + let result = resolve_effective_include_series_core(None, None); + assert!(result.is_empty()); + } + + #[test] + fn test_resolve_effective_union() { + let a = vec!["x".into(), "y".into()]; + let b = vec!["y".into(), "z".into()]; + let mut result = resolve_effective_include_series_core(Some(&a), Some(&b)); + result.sort(); + assert_eq!(result, vec!["x", "y", "z"]); + } + + #[test] + fn test_matches_series_none_seri() { + assert!(matches_series_core(None, &["a".into()])); + } + + #[test] + fn test_matches_series_empty_list() { + let seri = SeriName::Single("a".into()); + assert!(matches_series_core(Some(&seri), &[])); + } + + #[test] + fn test_matches_series_string_hit() { + let seri = SeriName::Single("a".into()); + assert!(matches_series_core(Some(&seri), &["a".into(), "b".into()])); + } + + #[test] + fn test_matches_series_string_miss() { + let seri = SeriName::Single("c".into()); + assert!(!matches_series_core(Some(&seri), &["a".into(), "b".into()])); + } + + #[test] + fn test_matches_series_array_intersection() { + let seri = SeriName::Multiple(vec!["c".into(), "a".into()]); + assert!(matches_series_core(Some(&seri), &["a".into(), "b".into()])); + } + + #[test] + fn test_matches_series_array_no_intersection() { + let seri = SeriName::Multiple(vec!["c".into(), "d".into()]); + assert!(!matches_series_core(Some(&seri), &["a".into(), "b".into()])); + } + + #[test] + fn test_resolve_sub_series_both_none() { + let result = resolve_sub_series_core(None, None); + assert!(result.is_empty()); + } + + #[test] + fn test_resolve_sub_series_merge() { + let mut a = HashMap::new(); + a.insert("k".into(), vec!["v1".into()]); + let mut b = HashMap::new(); + b.insert("k".into(), vec!["v1".into(), "v2".into()]); + b.insert("k2".into(), vec!["v3".into()]); + + let result = resolve_sub_series_core(Some(&a), Some(&b)); + assert_eq!(result.len(), 2); + let mut k_vals = result["k"].clone(); + k_vals.sort(); + assert_eq!(k_vals, vec!["v1", "v2"]); + assert_eq!(result["k2"], vec!["v3"]); + } +} diff --git a/sdk/src/core/desk-paths.ts b/sdk/src/core/desk-paths.ts new file mode 100644 index 00000000..b66f9505 --- /dev/null +++ b/sdk/src/core/desk-paths.ts @@ -0,0 +1,179 @@ +import type {Buffer} from 'node:buffer' +import {buildFileOperationDiagnostic} from '@/diagnostics' +import {getNativeBinding} from './native-binding' + +export interface DeletionError { + readonly path: string + readonly error: unknown +} + +export interface DeletionResult { + readonly deleted: number + readonly deletedPaths: readonly string[] + readonly errors: readonly DeletionError[] +} + +export interface DeleteTargetsResult { + readonly deletedFiles: readonly string[] + readonly deletedDirs: readonly string[] + readonly fileErrors: readonly DeletionError[] + readonly dirErrors: readonly DeletionError[] +} + +export interface WriteLogger { + readonly trace: (data: object) => void + readonly error: (diagnostic: object) => void +} + +export interface SafeWriteOptions { + readonly fullPath: string + readonly content: string | Buffer + readonly type: string + readonly relativePath: string + readonly dryRun: boolean + readonly logger: WriteLogger +} + +export interface SafeWriteResult { + readonly path: string + readonly success: boolean + readonly skipped?: boolean + readonly error?: Error +} + +interface NativeDeskPathsBinding { + readonly getPlatformFixedDir?: () => string + readonly ensureDir?: (dir: string) => void + readonly existsSync?: (targetPath: string) => boolean + readonly deletePathSync?: (targetPath: string) => void + readonly writeFileSync?: (filePath: string, data: string | Buffer, encoding?: BufferEncoding) => void + readonly readFileSync?: (filePath: string, encoding?: BufferEncoding) => string + readonly deleteFiles?: (files: readonly string[]) => DeletionResult | Promise + readonly deleteDirectories?: (dirs: readonly string[]) => DeletionResult | Promise + readonly deleteEmptyDirectories?: (dirs: readonly string[]) => DeletionResult | Promise + readonly deleteTargets?: (targets: {readonly files?: readonly string[], readonly dirs?: readonly string[]}) => DeleteTargetsResult | Promise +} + +type NativeDeletionResult = DeletionResult & { + readonly deleted_paths?: readonly string[] +} + +type NativeDeleteTargetsResult = DeleteTargetsResult & { + readonly deleted_files?: readonly string[] + readonly deleted_dirs?: readonly string[] + readonly file_errors?: readonly DeletionError[] + readonly dir_errors?: readonly DeletionError[] +} + +function requireNativeDeskPathsBinding(): NativeDeskPathsBinding { + const binding = getNativeBinding() + if (binding == null) { + throw new Error('Native desk-paths binding is required. Build or install the Rust NAPI package before running tnmsc.') + } + return binding +} + +function requireDeskPathsMethod( + methodName: K +): NonNullable { + const binding = requireNativeDeskPathsBinding() + const method = binding[methodName] + if (method == null) { + throw new Error(`Native desk-paths binding is missing "${String(methodName)}". Rebuild the Rust NAPI package before running tnmsc.`) + } + return method +} + +function normalizeDeletionResult(result: NativeDeletionResult): DeletionResult { + return { + deleted: result.deleted, + deletedPaths: result.deletedPaths ?? result.deleted_paths ?? [], + errors: result.errors ?? [] + } +} + +function normalizeDeleteTargetsResult(result: NativeDeleteTargetsResult): DeleteTargetsResult { + return { + deletedFiles: result.deletedFiles ?? result.deleted_files ?? [], + deletedDirs: result.deletedDirs ?? result.deleted_dirs ?? [], + fileErrors: result.fileErrors ?? result.file_errors ?? [], + dirErrors: result.dirErrors ?? result.dir_errors ?? [] + } +} + +export function getPlatformFixedDir(): string { + return requireDeskPathsMethod('getPlatformFixedDir')() +} + +export function ensureDir(dir: string): void { + requireDeskPathsMethod('ensureDir')(dir) +} + +export function existsSync(targetPath: string): boolean { + return requireDeskPathsMethod('existsSync')(targetPath) +} + +export function deletePathSync(targetPath: string): void { + requireDeskPathsMethod('deletePathSync')(targetPath) +} + +export function writeFileSync(filePath: string, data: string | Buffer, encoding: BufferEncoding = 'utf8'): void { + requireDeskPathsMethod('writeFileSync')(filePath, data, encoding) +} + +export function readFileSync(filePath: string, encoding: BufferEncoding = 'utf8'): string { + return requireDeskPathsMethod('readFileSync')(filePath, encoding) +} + +export async function deleteFiles(files: readonly string[]): Promise { + return normalizeDeletionResult(await Promise.resolve(requireDeskPathsMethod('deleteFiles')(files) as NativeDeletionResult)) +} + +export async function deleteDirectories(dirs: readonly string[]): Promise { + return normalizeDeletionResult(await Promise.resolve(requireDeskPathsMethod('deleteDirectories')(dirs) as NativeDeletionResult)) +} + +export async function deleteEmptyDirectories(dirs: readonly string[]): Promise { + return normalizeDeletionResult(await Promise.resolve(requireDeskPathsMethod('deleteEmptyDirectories')(dirs) as NativeDeletionResult)) +} + +export async function deleteTargets(targets: { + readonly files?: readonly string[] + readonly dirs?: readonly string[] +}): Promise { + return normalizeDeleteTargetsResult(await Promise.resolve(requireDeskPathsMethod('deleteTargets')({ + files: targets.files ?? [], + dirs: targets.dirs ?? [] + }) as NativeDeleteTargetsResult)) +} + +export function writeFileSafe(options: SafeWriteOptions): SafeWriteResult { + const {fullPath, content, type, relativePath, dryRun, logger} = options + + if (dryRun) { + logger.trace({action: 'dryRun', type, path: fullPath}) + return {path: relativePath, success: true, skipped: false} + } + + try { + writeFileSync(fullPath, content) + logger.trace({action: 'write', type, path: fullPath}) + return {path: relativePath, success: true} + } + catch (error) { + const errMsg = error instanceof Error ? error.message : String(error) + logger.error(buildFileOperationDiagnostic({ + code: 'OUTPUT_FILE_WRITE_FAILED', + title: `Failed to write ${type} output`, + operation: 'write', + targetKind: `${type} output file`, + path: fullPath, + error: errMsg, + details: { + relativePath, + type + } + })) + return {path: relativePath, success: false, error: error as Error} + } +} diff --git a/sdk/src/core/desk_paths.rs b/sdk/src/core/desk_paths.rs new file mode 100644 index 00000000..c308fc6e --- /dev/null +++ b/sdk/src/core/desk_paths.rs @@ -0,0 +1,623 @@ +use std::env; +use std::fs; +use std::io; +use std::path::{Path, PathBuf}; + +use thiserror::Error; + +use crate::core::config; + +const WINDOWS_DRIVE_PREFIX_LEN: usize = 2; + +/// Errors emitted by the desk-paths helpers. +#[derive(Debug, Error)] +pub enum DeskPathsError { + #[error("{0}")] + Io(#[from] io::Error), + #[error("unsupported platform: {0}")] + UnsupportedPlatform(String), +} + +pub type DeskPathsResult = Result; + +/// Platform shim that mirrors the values used by the legacy TS module. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Platform { + Win32, + Linux, + Darwin, +} + +impl Platform { + fn from_runtime(ctx: &config::RuntimeEnvironmentContext) -> Self { + if ctx.is_wsl { + return Platform::Win32; + } + match env::consts::OS { + "macos" => Platform::Darwin, + "windows" => Platform::Win32, + _ => Platform::Linux, + } + } + + fn is_windows(self) -> bool { + matches!(self, Platform::Win32) + } +} + +pub fn get_platform_fixed_dir() -> DeskPathsResult { + let ctx = config::resolve_runtime_environment(); + let platform = Platform::from_runtime(&ctx); + let target = match platform { + Platform::Win32 => get_windows_fixed_dir(&ctx), + Platform::Darwin => get_home_dir(&ctx) + .join("Library") + .join("Application Support"), + Platform::Linux => get_linux_data_dir(&ctx), + }; + Ok(target.to_string_lossy().into_owned()) +} + +fn get_windows_fixed_dir(ctx: &config::RuntimeEnvironmentContext) -> PathBuf { + let default = get_home_dir(ctx).join("AppData").join("Local"); + let candidate = + env::var("LOCALAPPDATA").unwrap_or_else(|_| default.to_string_lossy().into_owned()); + PathBuf::from(resolve_user_path(&candidate, ctx)) +} + +fn get_linux_data_dir(ctx: &config::RuntimeEnvironmentContext) -> PathBuf { + if let Ok(xdg_data_home) = env::var("XDG_DATA_HOME") + && !xdg_data_home.trim().is_empty() + { + return PathBuf::from(resolve_user_path(&xdg_data_home, ctx)); + } + get_home_dir(ctx).join(".local").join("share") +} + +fn get_home_dir(ctx: &config::RuntimeEnvironmentContext) -> PathBuf { + ctx.effective_home_dir + .as_ref() + .cloned() + .or_else(|| ctx.native_home_dir.clone()) + .unwrap_or_else(|| dirs::home_dir().unwrap_or_else(|| PathBuf::from("/"))) +} + +fn resolve_user_path(raw_path: &str, ctx: &config::RuntimeEnvironmentContext) -> String { + let platform = Platform::from_runtime(ctx); + let home_dir = get_home_dir(ctx); + let expanded = expand_home_directory(raw_path, &home_dir); + if ctx.is_wsl { + if let Some(converted) = convert_windows_path_to_wsl(&expanded) { + return normalize_posix_like_path(&converted, true); + } + return normalize_posix_like_path(&expanded, true); + } + if platform.is_windows() { + normalize_windows_path(&expanded) + } else { + normalize_posix_like_path(&expanded, false) + } +} + +fn expand_home_directory(raw_path: &str, home_dir: &Path) -> String { + if raw_path == "~" { + return normalize_posix_like_path(&home_dir.to_string_lossy(), false); + } + if raw_path.starts_with("~/") || raw_path.starts_with("~\\") { + let suffix = &raw_path[2..]; + let normalized = suffix.replace('\\', "/"); + let mut joined = PathBuf::from(home_dir); + for component in normalized.split('/') { + if component.is_empty() || component == "." { + continue; + } + if component == ".." { + joined.pop(); + } else { + joined.push(component); + } + } + return normalize_posix_like_path(&joined.to_string_lossy(), false); + } + raw_path.to_string() +} + +fn normalize_posix_like_path(raw_path: &str, preserve_slashes: bool) -> String { + let replaced = raw_path.replace('\\', "/"); + let is_absolute = replaced.starts_with('/'); + let mut components = Vec::new(); + for segment in replaced.split('/') { + if segment.is_empty() || segment == "." { + continue; + } + if segment == ".." { + components.pop(); + continue; + } + components.push(segment); + } + let mut normalized = String::new(); + if is_absolute { + normalized.push('/'); + } + normalized.push_str(&components.join("/")); + if normalized.is_empty() { + if is_absolute { + normalized.push('/'); + } else if preserve_slashes { + normalized.push('.'); + } + } + normalized +} + +fn normalize_windows_path(raw_path: &str) -> String { + let replaced = raw_path.replace('/', "\\"); + let mut components = Vec::new(); + let mut rest = replaced.as_str(); + let mut prefix = String::new(); + if rest.len() >= WINDOWS_DRIVE_PREFIX_LEN && rest.as_bytes()[1] == b':' { + prefix = rest[..WINDOWS_DRIVE_PREFIX_LEN].to_ascii_uppercase(); + rest = &rest[WINDOWS_DRIVE_PREFIX_LEN..]; + } + for segment in rest.split('\\') { + if segment.is_empty() || segment == "." { + continue; + } + if segment == ".." { + components.pop(); + continue; + } + components.push(segment); + } + let mut normalized = prefix.clone(); + if !normalized.is_empty() && !components.is_empty() { + normalized.push('\\'); + } + normalized.push_str(&components.join("\\")); + if normalized.is_empty() { + normalized.push('.'); + } + normalized +} + +fn convert_windows_path_to_wsl(raw_path: &str) -> Option { + let bytes = raw_path.as_bytes(); + if bytes.len() < WINDOWS_DRIVE_PREFIX_LEN + 1 || bytes[1] != b':' { + return None; + } + let drive_letter = (bytes[0] as char).to_ascii_lowercase(); + if !drive_letter.is_ascii_alphabetic() { + return None; + } + let mut rest = &raw_path[WINDOWS_DRIVE_PREFIX_LEN..]; + if rest.starts_with('\\') || rest.starts_with('/') { + rest = &rest[1..]; + } + let normalized = rest.replace('\\', "/"); + let prefix = format!("/mnt/{}", drive_letter); + if normalized.is_empty() { + return Some(prefix); + } + Some(format!("{}/{}", prefix, normalized)) +} + +pub fn ensure_dir>(dir: P) -> io::Result<()> { + fs::create_dir_all(dir) +} + +pub fn exists_sync>(path: P) -> bool { + path.as_ref().exists() +} + +pub fn delete_path_sync>(path: P) -> io::Result<()> { + delete_path(path).map(|_| ()) +} + +fn delete_path(path: impl AsRef) -> io::Result { + let path = path.as_ref(); + let metadata = match fs::symlink_metadata(path) { + Ok(metadata) => metadata, + Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(false), + Err(err) => return Err(err), + }; + + if metadata.file_type().is_symlink() { + #[cfg(windows)] + { + return fs::metadata(path) + .map(|resolved| resolved.is_dir()) + .unwrap_or(false) + .then(|| fs::remove_dir(path).or_else(|_| fs::remove_file(path))) + .unwrap_or_else(|| fs::remove_file(path).or_else(|_| fs::remove_dir(path))) + .map(|_| true); + } + #[cfg(not(windows))] + { + return fs::remove_file(path).map(|_| true); + } + } + + if metadata.is_dir() { + fs::remove_dir_all(path).map(|_| true) + } else { + fs::remove_file(path).map(|_| true) + } +} + +pub fn write_file_sync>(path: P, content: &[u8]) -> io::Result<()> { + if let Some(parent) = path.as_ref().parent() { + fs::create_dir_all(parent)?; + } + fs::write(path, content) +} + +pub fn read_file_sync>(path: P) -> io::Result { + fs::read_to_string(&path).map_err(|err| { + io::Error::new( + err.kind(), + format!( + "Failed to read file \"{}\": {}", + path.as_ref().display(), + err + ), + ) + }) +} + +pub struct DeletionError { + pub path: String, + pub error: String, +} + +pub struct DeletionResult { + pub deleted: usize, + pub deleted_paths: Vec, + pub errors: Vec, +} + +pub struct DeleteTargetsResult { + pub deleted_files: Vec, + pub deleted_dirs: Vec, + pub file_errors: Vec, + pub dir_errors: Vec, +} + +fn delete_empty_directory(path: impl AsRef) -> io::Result { + let path = path.as_ref(); + let metadata = match fs::symlink_metadata(path) { + Ok(metadata) => metadata, + Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(false), + Err(err) => return Err(err), + }; + + if metadata.file_type().is_symlink() || !metadata.is_dir() { + return Ok(false); + } + + match fs::remove_dir(path) { + Ok(()) => Ok(true), + Err(err) + if err.kind() == io::ErrorKind::NotFound + || err.kind() == io::ErrorKind::DirectoryNotEmpty => + { + Ok(false) + } + Err(err) => Err(err), + } +} + +pub fn delete_files(paths: &[String]) -> DeletionResult { + let mut result = DeletionResult { + deleted: 0, + deleted_paths: Vec::new(), + errors: Vec::new(), + }; + for path in paths { + match delete_path(Path::new(path)) { + Ok(true) => { + result.deleted += 1; + result.deleted_paths.push(path.clone()); + } + Ok(false) => {} + Err(err) => result.errors.push(DeletionError { + path: path.clone(), + error: err.to_string(), + }), + } + } + result +} + +pub fn delete_directories(paths: &[String]) -> DeletionResult { + let mut sorted_paths = paths.to_vec(); + sorted_paths.sort_by(|a, b| b.len().cmp(&a.len()).then_with(|| b.cmp(a))); + + let mut result = DeletionResult { + deleted: 0, + deleted_paths: Vec::new(), + errors: Vec::new(), + }; + for path in &sorted_paths { + match delete_path(Path::new(path)) { + Ok(true) => { + result.deleted += 1; + result.deleted_paths.push(path.clone()); + } + Ok(false) => {} + Err(err) => result.errors.push(DeletionError { + path: path.clone(), + error: err.to_string(), + }), + } + } + result +} + +pub fn delete_empty_directories(paths: &[String]) -> DeletionResult { + let mut sorted_paths = paths.to_vec(); + sorted_paths.sort_by(|a, b| b.len().cmp(&a.len()).then_with(|| b.cmp(a))); + + let mut result = DeletionResult { + deleted: 0, + deleted_paths: Vec::new(), + errors: Vec::new(), + }; + for path in &sorted_paths { + match delete_empty_directory(Path::new(path)) { + Ok(true) => { + result.deleted += 1; + result.deleted_paths.push(path.clone()); + } + Ok(false) => {} + Err(err) => result.errors.push(DeletionError { + path: path.clone(), + error: err.to_string(), + }), + } + } + result +} + +pub fn delete_targets(files: &[String], dirs: &[String]) -> DeleteTargetsResult { + let file_result = delete_files(files); + let dir_result = delete_directories(dirs); + DeleteTargetsResult { + deleted_files: file_result.deleted_paths, + deleted_dirs: dir_result.deleted_paths, + file_errors: file_result.errors, + dir_errors: dir_result.errors, + } +} + +#[cfg(feature = "napi")] +mod napi_binding { + use napi::bindgen_prelude::*; + use napi_derive::napi; + + use super::DeletionError; + + #[napi] + pub fn get_platform_fixed_dir() -> napi::Result { + super::get_platform_fixed_dir().map_err(|err| napi::Error::from_reason(err.to_string())) + } + + #[napi] + pub fn ensure_dir(path: String) -> napi::Result<()> { + super::ensure_dir(path).map_err(|err| napi::Error::from_reason(err.to_string())) + } + + #[napi] + pub fn exists_sync(path: String) -> bool { + super::exists_sync(path) + } + + #[napi] + pub fn delete_path_sync(path: String) -> napi::Result<()> { + super::delete_path_sync(path).map_err(|err| napi::Error::from_reason(err.to_string())) + } + + #[napi] + pub fn write_file_sync( + path: String, + data: Either, + encoding: Option, + ) -> napi::Result<()> { + if let Some(value) = encoding.as_deref() { + let normalized = value.to_ascii_lowercase(); + if normalized != "utf8" && normalized != "utf-8" { + return Err(napi::Error::from_reason(format!( + "unsupported encoding: {}", + value + ))); + } + } + + let bytes = match data { + Either::A(text) => text.into_bytes(), + Either::B(buffer) => buffer.to_vec(), + }; + super::write_file_sync(path, &bytes) + .map_err(|err| napi::Error::from_reason(err.to_string())) + } + + #[napi] + pub fn read_file_sync(path: String, encoding: Option) -> napi::Result { + if let Some(value) = encoding.as_deref() { + let normalized = value.to_ascii_lowercase(); + if normalized != "utf8" && normalized != "utf-8" { + return Err(napi::Error::from_reason(format!( + "unsupported encoding: {}", + value + ))); + } + } + super::read_file_sync(path).map_err(|err| napi::Error::from_reason(err.to_string())) + } + + #[napi(object)] + pub struct NapiDeletionError { + pub path: String, + pub error: String, + } + + #[napi(object)] + pub struct NapiDeletionResult { + pub deleted: u32, + #[napi(js_name = "deletedPaths")] + pub deleted_paths: Vec, + pub errors: Vec, + } + + #[napi(object)] + pub struct NapiDeleteTargetsResult { + #[napi(js_name = "deletedFiles")] + pub deleted_files: Vec, + #[napi(js_name = "deletedDirs")] + pub deleted_dirs: Vec, + #[napi(js_name = "fileErrors")] + pub file_errors: Vec, + #[napi(js_name = "dirErrors")] + pub dir_errors: Vec, + } + + fn to_napi_error(err: DeletionError) -> NapiDeletionError { + NapiDeletionError { + path: err.path, + error: err.error, + } + } + + #[napi] + pub fn delete_files(paths: Vec) -> NapiDeletionResult { + let result = super::delete_files(&paths); + NapiDeletionResult { + deleted: result.deleted as u32, + deleted_paths: result.deleted_paths, + errors: result.errors.into_iter().map(to_napi_error).collect(), + } + } + + #[napi] + pub fn delete_directories(paths: Vec) -> NapiDeletionResult { + let result = super::delete_directories(&paths); + NapiDeletionResult { + deleted: result.deleted as u32, + deleted_paths: result.deleted_paths, + errors: result.errors.into_iter().map(to_napi_error).collect(), + } + } + + #[napi] + pub fn delete_empty_directories(paths: Vec) -> NapiDeletionResult { + let result = super::delete_empty_directories(&paths); + NapiDeletionResult { + deleted: result.deleted as u32, + deleted_paths: result.deleted_paths, + errors: result.errors.into_iter().map(to_napi_error).collect(), + } + } + + #[napi(object)] + pub struct DeleteTargetsInput { + pub files: Option>, + pub dirs: Option>, + } + + #[napi] + pub fn delete_targets(paths: DeleteTargetsInput) -> NapiDeleteTargetsResult { + let files = paths.files.unwrap_or_default(); + let dirs = paths.dirs.unwrap_or_default(); + let result = super::delete_targets(&files, &dirs); + NapiDeleteTargetsResult { + deleted_files: result.deleted_files, + deleted_dirs: result.deleted_dirs, + file_errors: result.file_errors.into_iter().map(to_napi_error).collect(), + dir_errors: result.dir_errors.into_iter().map(to_napi_error).collect(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::tempdir; + + #[test] + fn delete_targets_batch() { + let dir = tempdir().unwrap(); + let files_dir = dir.path().join("files"); + let dirs_dir = dir.path().join("dirs"); + fs::create_dir_all(&files_dir).unwrap(); + fs::create_dir_all(dirs_dir.join("nested")).unwrap(); + let file = files_dir.join("artifact.txt"); + fs::write(&file, b"data").unwrap(); + let leaf = dirs_dir.join("nested").join("inner.txt"); + fs::write(&leaf, b"payload").unwrap(); + + let result = delete_targets( + &[file.to_string_lossy().into_owned()], + &[dirs_dir.to_string_lossy().into_owned()], + ); + + assert_eq!( + result.deleted_files, + vec![file.to_string_lossy().into_owned()] + ); + assert!( + result + .deleted_dirs + .contains(&dirs_dir.to_string_lossy().into_owned()) + ); + assert!(result.file_errors.is_empty()); + assert!(result.dir_errors.is_empty()); + } + + #[test] + fn delete_empty_directories_only_removes_empty_paths() { + let dir = tempdir().unwrap(); + let parent_dir = dir.path().join("empty-parent"); + let child_dir = parent_dir.join("leaf"); + let non_empty_dir = dir.path().join("non-empty"); + fs::create_dir_all(&child_dir).unwrap(); + fs::create_dir_all(&non_empty_dir).unwrap(); + fs::write(non_empty_dir.join("keep.txt"), b"keep").unwrap(); + + let result = delete_empty_directories(&[ + parent_dir.to_string_lossy().into_owned(), + child_dir.to_string_lossy().into_owned(), + non_empty_dir.to_string_lossy().into_owned(), + ]); + + assert_eq!(result.deleted, 2); + assert_eq!( + result.deleted_paths, + vec![ + child_dir.to_string_lossy().into_owned(), + parent_dir.to_string_lossy().into_owned(), + ] + ); + assert!(result.errors.is_empty()); + assert!(!parent_dir.exists()); + assert!(non_empty_dir.exists()); + } + + #[test] + fn delete_empty_directories_skips_non_empty_and_missing_paths() { + let dir = tempdir().unwrap(); + let target_dir = dir.path().join("maybe-empty"); + fs::create_dir_all(&target_dir).unwrap(); + fs::write(target_dir.join("new-file.txt"), b"late write").unwrap(); + + let result = delete_empty_directories(&[ + target_dir.to_string_lossy().into_owned(), + dir.path().join("missing").to_string_lossy().into_owned(), + ]); + + assert_eq!(result.deleted, 0); + assert!(result.deleted_paths.is_empty()); + assert!(result.errors.is_empty()); + assert!(target_dir.exists()); + } +} diff --git a/sdk/src/core/input_plugins.rs b/sdk/src/core/input_plugins.rs new file mode 100644 index 00000000..6ceb47ea --- /dev/null +++ b/sdk/src/core/input_plugins.rs @@ -0,0 +1,9 @@ +#![deny(clippy::all)] + +//! All 17 input plugins for the tnmsc pipeline. +//! +//! Plugins are grouped by type: +//! - File readers (workspace, gitignore, editorconfig, vscode, jetbrains) +//! - MDX directory scanners (fast-command, sub-agent, rule, global-memory) +//! - Complex plugins (shadow-project, skill, project-prompt, readme) +//! - Effect plugins (md-cleanup, orphan-cleanup, skill-dist-cleanup) diff --git a/sdk/src/core/mod.rs b/sdk/src/core/mod.rs new file mode 100644 index 00000000..5881df2c --- /dev/null +++ b/sdk/src/core/mod.rs @@ -0,0 +1,5 @@ +pub mod cleanup; +pub mod config; +pub mod desk_paths; +pub mod input_plugins; +pub mod plugin_shared; diff --git a/sdk/src/core/native-binding.ts b/sdk/src/core/native-binding.ts new file mode 100644 index 00000000..d761cef8 --- /dev/null +++ b/sdk/src/core/native-binding.ts @@ -0,0 +1,63 @@ +import {createRequire} from 'node:module' +import process from 'node:process' + +function shouldSkipNativeBinding(): boolean { + if (process.env['TNMSC_FORCE_NATIVE_BINDING'] === '1') return false + if (process.env['TNMSC_DISABLE_NATIVE_BINDING'] === '1') return true + + return process.env['NODE_ENV'] === 'test' || process.env['VITEST'] != null || process.env['VITEST_WORKER_ID'] != null +} + +export function tryLoadNativeBinding(): T | undefined { + const testGlobals = globalThis as typeof globalThis & {__TNMSC_TEST_NATIVE_BINDING__?: object} + const testBinding: unknown = testGlobals.__TNMSC_TEST_NATIVE_BINDING__ + if (testBinding != null && typeof testBinding === 'object') return testBinding as T + if (shouldSkipNativeBinding()) return void 0 + + const suffixMap: Readonly> = { + 'win32-x64': 'win32-x64-msvc', + 'linux-x64': 'linux-x64-gnu', + 'linux-arm64': 'linux-arm64-gnu', + 'darwin-arm64': 'darwin-arm64', + 'darwin-x64': 'darwin-x64' + } + const suffix = suffixMap[`${process.platform}-${process.arch}`] + if (suffix == null) return void 0 + + try { + const _require = createRequire(import.meta.url) + const packageName = `@truenine/memory-sync-cli-${suffix}` + const binaryFile = `napi-memory-sync-cli.${suffix}.node` + const candidates = [ + packageName, + `${packageName}/${binaryFile}`, + `./${binaryFile}`, + `../npm/${suffix}`, + `../npm/${suffix}/${binaryFile}`, + `../../npm/${suffix}`, + `../../npm/${suffix}/${binaryFile}` + ] + + for (const specifier of candidates) { + try { + const loaded = _require(specifier) as unknown + const possibleBindings = [ + (loaded as {config?: unknown})?.config, + (loaded as {default?: {config?: unknown}})?.default?.config, + (loaded as {default?: unknown})?.default, + loaded + ] + + for (const candidate of possibleBindings) { + if (candidate != null && typeof candidate === 'object') return candidate as T + } + } catch {} + } + } catch {} + + return void 0 +} + +export function getNativeBinding(): T | undefined { + return tryLoadNativeBinding() +} diff --git a/sdk/src/core/plugin_shared.rs b/sdk/src/core/plugin_shared.rs new file mode 100644 index 00000000..ead00550 --- /dev/null +++ b/sdk/src/core/plugin_shared.rs @@ -0,0 +1,623 @@ +//! Shared types and data structures for tnmsc plugins. +//! +//! Defines `CollectedInputContext`, `RelativePath`, plugin traits, +//! and other types shared between input plugins, CLI, and output runtime. + +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::HashMap; +use std::path::PathBuf; + +// --------------------------------------------------------------------------- +// Enums +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum PluginKind { + Input, + Output, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum PromptKind { + GlobalMemory, + ProjectRootMemory, + ProjectChildrenMemory, + FastCommand, + SubAgent, + Skill, + SkillChildDoc, + SkillResource, + SkillMcpConfig, + Readme, + Rule, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum RuleScope { + Project, + Global, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum FilePathKind { + Relative, + Absolute, + Root, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum IDEKind { + VSCode, + IntellijIDEA, + Git, + EditorConfig, + Original, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum NamingCaseKind { + CamelCase, + PascalCase, + SnakeCase, + KebabCase, + UpperCase, + LowerCase, + Original, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum SkillResourceEncoding { + Text, + Base64, +} + +// --------------------------------------------------------------------------- +// Path types +// --------------------------------------------------------------------------- + +/// Relative path with base path for computing absolute paths. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RelativePath { + pub path_kind: FilePathKind, + pub path: String, + pub base_path: String, + /// Pre-computed absolute path for serialization to Node.js + #[serde(skip_serializing_if = "Option::is_none")] + pub absolute_path: Option, + /// Pre-computed directory name for serialization to Node.js + #[serde(skip_serializing_if = "Option::is_none")] + pub directory_name: Option, +} + +impl RelativePath { + pub fn new(path: &str, base_path: &str) -> Self { + let abs = PathBuf::from(base_path).join(path); + let dir_name = PathBuf::from(path) + .parent() + .map(|p| p.to_string_lossy().into_owned()) + .unwrap_or_default(); + Self { + path_kind: FilePathKind::Relative, + path: path.to_string(), + base_path: base_path.to_string(), + absolute_path: Some(abs.to_string_lossy().into_owned()), + directory_name: Some(dir_name), + } + } + + pub fn get_absolute_path(&self) -> String { + self.absolute_path.clone().unwrap_or_else(|| { + PathBuf::from(&self.base_path) + .join(&self.path) + .to_string_lossy() + .into_owned() + }) + } + + pub fn get_directory_name(&self) -> String { + self.directory_name.clone().unwrap_or_else(|| { + PathBuf::from(&self.path) + .parent() + .map(|p| p.to_string_lossy().into_owned()) + .unwrap_or_default() + }) + } +} + +/// Root path (workspace root). +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RootPath { + pub path_kind: FilePathKind, + pub path: String, +} + +impl RootPath { + pub fn new(path: &str) -> Self { + Self { + path_kind: FilePathKind::Root, + path: path.to_string(), + } + } +} + +// --------------------------------------------------------------------------- +// YAML front matter types +// --------------------------------------------------------------------------- + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct YAMLFrontMatter { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub naming_case: Option, + #[serde(flatten)] + pub extra: HashMap, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CommonYAMLFrontMatter { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub naming_case: Option, + #[serde(flatten)] + pub extra: HashMap, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RuleYAMLFrontMatter { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(default)] + pub globs: Vec, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub scope: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub seri_name: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub naming_case: Option, + #[serde(flatten)] + pub extra: HashMap, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct FastCommandYAMLFrontMatter { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub argument_hint: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub allow_tools: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub naming_case: Option, + #[serde(flatten)] + pub extra: HashMap, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SubAgentYAMLFrontMatter { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub name: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub model: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub color: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub argument_hint: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub allow_tools: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub naming_case: Option, + #[serde(flatten)] + pub extra: HashMap, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SkillYAMLFrontMatter { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub name: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub description: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub display_name: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub author: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub version: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub keywords: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub allow_tools: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub naming_case: Option, + #[serde(flatten)] + pub extra: HashMap, +} + +// --------------------------------------------------------------------------- +// Prompt types +// --------------------------------------------------------------------------- + +/// Rule prompt with glob patterns. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RulePrompt { + #[serde(rename = "type")] + pub prompt_type: PromptKind, + pub content: String, + pub length: usize, + pub dir: RelativePath, + pub series: String, + pub rule_name: String, + pub globs: Vec, + pub scope: RuleScope, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub seri_name: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub yaml_front_matter: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub raw_mdx_content: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub markdown_contents: Option>, +} + +/// Fast command prompt. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct FastCommandPrompt { + #[serde(rename = "type")] + pub prompt_type: PromptKind, + pub content: String, + pub length: usize, + pub dir: RelativePath, + pub command_name: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub series: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub global_only: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub yaml_front_matter: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub raw_mdx_content: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub markdown_contents: Option>, +} + +/// Sub-agent prompt. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SubAgentPrompt { + #[serde(rename = "type")] + pub prompt_type: PromptKind, + pub content: String, + pub length: usize, + pub dir: RelativePath, + pub agent_name: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub series: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub yaml_front_matter: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub raw_mdx_content: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub markdown_contents: Option>, +} + +/// Skill child document. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SkillChildDoc { + #[serde(rename = "type")] + pub prompt_type: PromptKind, + pub content: String, + pub length: usize, + pub relative_path: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub markdown_contents: Option>, +} + +/// Skill resource file. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SkillResource { + #[serde(rename = "type")] + pub prompt_type: PromptKind, + pub extension: String, + pub file_name: String, + pub relative_path: String, + pub content: String, + pub encoding: SkillResourceEncoding, + pub length: usize, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub mime_type: Option, +} + +/// MCP server configuration entry. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct McpServerConfig { + pub command: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub args: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub env: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub disabled: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub auto_approve: Option>, +} + +/// Skill MCP configuration (mcp.json). +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SkillMcpConfig { + #[serde(rename = "type")] + pub prompt_type: PromptKind, + pub mcp_servers: HashMap, + pub raw_content: String, +} + +/// Skill prompt. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct SkillPrompt { + #[serde(rename = "type")] + pub prompt_type: PromptKind, + pub content: String, + pub length: usize, + pub dir: RelativePath, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub yaml_front_matter: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub mcp_config: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub child_docs: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub resources: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub markdown_contents: Option>, +} + +/// Global memory prompt. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GlobalMemoryPrompt { + #[serde(rename = "type")] + pub prompt_type: PromptKind, + pub content: String, + pub length: usize, + pub dir: RelativePath, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub markdown_contents: Option>, +} + +/// Readme prompt. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ReadmePrompt { + #[serde(rename = "type")] + pub prompt_type: PromptKind, + pub content: String, + pub length: usize, + pub dir: RelativePath, + pub project_name: String, + pub target_dir: RelativePath, + pub is_root: bool, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub markdown_contents: Option>, +} + +// --------------------------------------------------------------------------- +// IDE config types +// --------------------------------------------------------------------------- + +/// IDE configuration file. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProjectIDEConfigFile { + #[serde(rename = "type")] + pub ide_type: IDEKind, + pub content: String, + pub length: usize, + pub dir: RelativePath, + pub file_path_kind: FilePathKind, +} + +// --------------------------------------------------------------------------- +// Project & Workspace +// --------------------------------------------------------------------------- + +/// Project within a workspace. +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Project { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub name: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub dir_from_workspace_path: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub root_memory_prompt: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub child_memory_prompts: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub is_prompt_source_project: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub project_config: Option, +} + +/// Workspace containing projects. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Workspace { + pub directory: RootPath, + #[serde(default)] + pub projects: Vec, +} + +// --------------------------------------------------------------------------- +// CollectedInputContext — the main bridge type +// --------------------------------------------------------------------------- + +/// All collected input information, serialized from Rust to Node.js output runtime. +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CollectedInputContext { + #[serde(default, skip_serializing_if = "Option::is_none")] + pub workspace: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub vscode_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub jetbrains_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub editor_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub fast_commands: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub sub_agents: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub skills: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub rules: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub global_memory: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub global_git_ignore: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub shadow_git_exclude: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub shadow_source_project_dir: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub readme_prompts: Option>, +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_relative_path() { + let rp = RelativePath::new("src/skills/test.mdx", "/home/user/workspace/aindex"); + assert_eq!(rp.path, "src/skills/test.mdx"); + assert_eq!(rp.base_path, "/home/user/workspace/aindex"); + assert!(rp.get_absolute_path().contains("src/skills/test.mdx")); + assert_eq!(rp.get_directory_name(), "src/skills"); + } + + #[test] + fn test_collected_input_context_default() { + let ctx = CollectedInputContext::default(); + assert!(ctx.workspace.is_none()); + assert!(ctx.fast_commands.is_none()); + } + + #[test] + fn test_collected_input_context_serialize() { + let ctx = CollectedInputContext { + workspace: Some(Workspace { + directory: RootPath::new("/workspace"), + projects: vec![], + }), + global_git_ignore: Some("node_modules/\n".to_string()), + ..Default::default() + }; + let json = serde_json::to_string(&ctx).unwrap(); + assert!(json.contains("workspace")); + assert!(json.contains("globalGitIgnore")); + // Fields that are None should not appear + assert!(!json.contains("fastCommands")); + } + + #[test] + fn test_collected_input_context_roundtrip() { + let ctx = CollectedInputContext { + workspace: Some(Workspace { + directory: RootPath::new("/workspace"), + projects: vec![Project { + name: Some("test-project".into()), + ..Default::default() + }], + }), + fast_commands: Some(vec![FastCommandPrompt { + prompt_type: PromptKind::FastCommand, + content: "# Test Command\n\nDo something.".into(), + length: 30, + dir: RelativePath::new("commands/test.mdx", "/workspace/aindex/dist"), + command_name: "test".into(), + series: Some("default".into()), + global_only: None, + yaml_front_matter: Some(FastCommandYAMLFrontMatter { + description: Some("A test command".into()), + ..Default::default() + }), + raw_mdx_content: None, + markdown_contents: None, + }]), + ..Default::default() + }; + + let json = serde_json::to_string_pretty(&ctx).unwrap(); + let parsed: CollectedInputContext = serde_json::from_str(&json).unwrap(); + assert_eq!(parsed.workspace.as_ref().unwrap().projects.len(), 1); + assert_eq!(parsed.fast_commands.as_ref().unwrap().len(), 1); + assert_eq!( + parsed.fast_commands.as_ref().unwrap()[0].command_name, + "test" + ); + } + + #[test] + fn test_rule_prompt_serialize() { + let rule = RulePrompt { + prompt_type: PromptKind::Rule, + content: "# Rule\n\nDo this.".into(), + length: 17, + dir: RelativePath::new("rules/default/test.mdx", "/workspace/aindex/dist"), + series: "default".into(), + rule_name: "test".into(), + globs: vec!["**/*.ts".into(), "**/*.tsx".into()], + scope: RuleScope::Project, + seri_name: None, + yaml_front_matter: None, + raw_mdx_content: None, + markdown_contents: None, + }; + let json = serde_json::to_string(&rule).unwrap(); + assert!(json.contains("\"type\":\"Rule\"")); + assert!(json.contains("\"globs\"")); + } + + #[test] + fn test_enums_serialize() { + assert_eq!( + serde_json::to_string(&PromptKind::FastCommand).unwrap(), + "\"FastCommand\"" + ); + assert_eq!( + serde_json::to_string(&RuleScope::Global).unwrap(), + "\"global\"" + ); + assert_eq!( + serde_json::to_string(&IDEKind::VSCode).unwrap(), + "\"VSCode\"" + ); + assert_eq!( + serde_json::to_string(&SkillResourceEncoding::Base64).unwrap(), + "\"base64\"" + ); + } +} diff --git a/sdk/src/diagnostic_helpers.rs b/sdk/src/diagnostic_helpers.rs new file mode 100644 index 00000000..c01bc7f9 --- /dev/null +++ b/sdk/src/diagnostic_helpers.rs @@ -0,0 +1,32 @@ +use serde_json::{Map, Value}; +use tnmsc_logger::LoggerDiagnosticInput; + +pub(crate) fn line(value: impl Into) -> Vec { + vec![value.into()] +} + +pub(crate) fn diagnostic( + code: impl Into, + title: impl Into, + root_cause: Vec, + exact_fix: Option>, + possible_fixes: Option>>, + details: Option>, +) -> LoggerDiagnosticInput { + LoggerDiagnosticInput { + code: code.into(), + title: title.into(), + root_cause, + exact_fix, + possible_fixes, + details, + } +} + +pub(crate) fn optional_details(value: Value) -> Option> { + match value { + Value::Object(map) if !map.is_empty() => Some(map), + Value::Object(_) => None, + _ => None, + } +} diff --git a/sdk/src/diagnostics.test.ts b/sdk/src/diagnostics.test.ts new file mode 100644 index 00000000..053d1ec9 --- /dev/null +++ b/sdk/src/diagnostics.test.ts @@ -0,0 +1,54 @@ +import {describe, expect, it} from 'vitest' +import {buildFileOperationDiagnostic} from './diagnostics' + +describe('buildFileOperationDiagnostic', () => { + it('emits Windows-specific cleanup guidance for EPERM directory deletions', () => { + const diagnostic = buildFileOperationDiagnostic({ + code: 'CLEANUP_DIRECTORY_DELETE_FAILED', + title: 'Cleanup could not delete a directory', + operation: 'delete', + targetKind: 'directory', + path: 'C:\\workspace\\.opencode\\skills', + error: 'EPERM, Permission denied: \\\\?\\C:\\workspace\\.opencode\\skills', + platform: 'win32' + }) + + expect(diagnostic.exactFix).toEqual([ + 'Close any process that is using "C:\\workspace\\.opencode\\skills", delete the stale directory, and rerun tnmsc.', + 'Common lockers on Windows include editors, terminals, antivirus scanners, sync clients, and AI tools watching generated files.' + ]) + expect(diagnostic.possibleFixes).toEqual([ + ['Use Resource Monitor or Process Explorer to find which process holds a handle under "C:\\workspace\\.opencode\\skills".'], + ['Make sure no shell, editor tab, or file watcher is currently opened inside "C:\\workspace\\.opencode\\skills" or one of its children.'], + ['If antivirus or cloud sync is scanning generated outputs, wait for it to release the directory or exclude this output path.'] + ]) + expect(diagnostic.details).toMatchObject({ + platform: 'win32', + errorMessage: 'EPERM, Permission denied: \\\\?\\C:\\workspace\\.opencode\\skills' + }) + }) + + it('keeps generic guidance for non-Windows or non-permission errors', () => { + const diagnostic = buildFileOperationDiagnostic({ + code: 'OUTPUT_FILE_WRITE_FAILED', + title: 'Failed to write output', + operation: 'write', + targetKind: 'file', + path: '/tmp/output.md', + error: 'ENOENT: no such file or directory', + platform: 'linux' + }) + + expect(diagnostic.exactFix).toEqual([ + 'Verify that "/tmp/output.md" exists, has the expected type, and is accessible to tnmsc.' + ]) + expect(diagnostic.possibleFixes).toEqual([ + ['Check file permissions and ownership for the target path.'], + ['Confirm that another process did not delete, move, or lock the target path.'] + ]) + expect(diagnostic.details).toMatchObject({ + platform: 'linux', + errorMessage: 'ENOENT: no such file or directory' + }) + }) +}) diff --git a/sdk/src/diagnostics.ts b/sdk/src/diagnostics.ts new file mode 100644 index 00000000..15634a9b --- /dev/null +++ b/sdk/src/diagnostics.ts @@ -0,0 +1,415 @@ +import type { + DiagnosticLines, + LoggerDiagnosticInput, + LoggerDiagnosticRecord +} from './plugins/plugin-core' +import type {ProtectedPathViolation} from './ProtectedDeletionGuard' +import process from 'node:process' + +export function diagnosticLines(firstLine: string, ...otherLines: string[]): DiagnosticLines { + return [firstLine, ...otherLines] +} + +export function toErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error) +} + +export function splitDiagnosticText(text: string): DiagnosticLines { + const lines = text + .split(/\r?\n/u) + .map(line => line.trimEnd()) + .filter(line => line.length > 0) + + if (lines.length === 0) return diagnosticLines('No diagnostic details were provided.') + const [firstLine, ...otherLines] = lines + if (firstLine == null) return diagnosticLines('No diagnostic details were provided.') + return diagnosticLines(firstLine, ...otherLines) +} + +export function buildDiagnostic(input: LoggerDiagnosticInput): LoggerDiagnosticInput { + return input +} + +interface DiagnosticFailure { + readonly path: string + readonly error: unknown + readonly details?: Record | undefined +} + +interface FileOperationDiagnosticOptions { + readonly code: string + readonly title: string + readonly operation: string + readonly targetKind: string + readonly path: string + readonly error: unknown + readonly platform?: NodeJS.Platform | undefined + readonly exactFix?: DiagnosticLines | undefined + readonly possibleFixes?: readonly DiagnosticLines[] | undefined + readonly details?: Record | undefined +} + +interface FileOperationAdvice { + readonly exactFix: DiagnosticLines + readonly possibleFixes: readonly DiagnosticLines[] +} + +function normalizeErrorMessage(error: unknown): string { + return toErrorMessage(error).toLowerCase() +} + +function isWindowsDirectoryDeletePermissionDenied(options: { + readonly operation: string + readonly targetKind: string + readonly error: unknown + readonly platform: NodeJS.Platform +}): boolean { + if (options.platform !== 'win32') return false + if (options.operation !== 'delete') return false + if (options.targetKind !== 'directory') return false + + const normalizedError = normalizeErrorMessage(options.error) + return normalizedError.includes('eperm') || normalizedError.includes('permission denied') +} + +function buildFileOperationAdvice(options: { + readonly operation: string + readonly targetKind: string + readonly path: string + readonly error: unknown + readonly platform: NodeJS.Platform +}): FileOperationAdvice { + if (isWindowsDirectoryDeletePermissionDenied(options)) { + return { + exactFix: diagnosticLines( + `Close any process that is using "${options.path}", delete the stale directory, and rerun tnmsc.`, + `Common lockers on Windows include editors, terminals, antivirus scanners, sync clients, and AI tools watching generated files.` + ), + possibleFixes: [ + diagnosticLines( + `Use Resource Monitor or Process Explorer to find which process holds a handle under "${options.path}".` + ), + diagnosticLines( + `Make sure no shell, editor tab, or file watcher is currently opened inside "${options.path}" or one of its children.` + ), + diagnosticLines( + `If antivirus or cloud sync is scanning generated outputs, wait for it to release the directory or exclude this output path.` + ) + ] + } + } + + return { + exactFix: diagnosticLines( + `Verify that "${options.path}" exists, has the expected type, and is accessible to tnmsc.` + ), + possibleFixes: [ + diagnosticLines('Check file permissions and ownership for the target path.'), + diagnosticLines('Confirm that another process did not delete, move, or lock the target path.') + ] + } +} + +export function buildFileOperationDiagnostic(options: FileOperationDiagnosticOptions): LoggerDiagnosticInput { + const { + code, + title, + operation, + targetKind, + path, + error, + platform, + exactFix, + possibleFixes, + details + } = options + const errorMessage = toErrorMessage(error) + const advice = buildFileOperationAdvice({ + operation, + targetKind, + path, + error, + platform: platform ?? process.platform + }) + + return buildDiagnostic({ + code, + title, + rootCause: diagnosticLines( + `tnmsc could not ${operation} the ${targetKind} at "${path}".`, + `Underlying error: ${errorMessage}` + ), + exactFix: exactFix ?? advice.exactFix, + possibleFixes: possibleFixes ?? advice.possibleFixes, + details: { + operation, + targetKind, + path, + errorMessage, + platform: platform ?? process.platform, + ...details ?? {} + } + }) +} + +interface BatchFileOperationDiagnosticOptions { + readonly code: string + readonly title: string + readonly operation: string + readonly targetKind: string + readonly failures: readonly DiagnosticFailure[] + readonly exactFix?: DiagnosticLines | undefined + readonly possibleFixes?: readonly DiagnosticLines[] | undefined + readonly details?: Record | undefined +} + +export function buildBatchFileOperationDiagnostic(options: BatchFileOperationDiagnosticOptions): LoggerDiagnosticInput { + const { + code, + title, + operation, + targetKind, + failures, + exactFix, + possibleFixes, + details + } = options + const firstFailure = failures[0] + const firstFailureLine = firstFailure == null + ? 'No failing path details were captured.' + : `First failure: "${firstFailure.path}" -> ${toErrorMessage(firstFailure.error)}` + + return buildDiagnostic({ + code, + title, + rootCause: diagnosticLines( + `tnmsc encountered ${failures.length} failed ${operation} operation(s) while handling ${targetKind}.`, + firstFailureLine + ), + exactFix: exactFix ?? diagnosticLines( + `Inspect the failing ${targetKind} path and correct the underlying ${operation} problem before retrying tnmsc.` + ), + possibleFixes: possibleFixes ?? [ + diagnosticLines('Verify the target path exists, has the expected type, and is accessible to tnmsc.'), + diagnosticLines('Check whether another process deleted, moved, or locked the target path.') + ], + details: { + operation, + targetKind, + failures: failures.map(failure => ({ + path: failure.path, + errorMessage: toErrorMessage(failure.error), + ...failure.details ?? {} + })), + ...details ?? {} + } + }) +} + +interface ConfigDiagnosticOptions { + readonly code: string + readonly title: string + readonly reason: DiagnosticLines + readonly configPath?: string | undefined + readonly exactFix?: DiagnosticLines | undefined + readonly possibleFixes?: readonly DiagnosticLines[] | undefined + readonly details?: Record | undefined +} + +export function buildConfigDiagnostic(options: ConfigDiagnosticOptions): LoggerDiagnosticInput { + const { + code, + title, + reason, + configPath, + exactFix, + possibleFixes, + details + } = options + + return buildDiagnostic({ + code, + title, + rootCause: configPath == null + ? reason + : diagnosticLines(reason[0], ...reason.slice(1), `Config path: ${configPath}`), + exactFix, + possibleFixes, + details: { + ...configPath != null ? {configPath} : {}, + ...details ?? {} + } + }) +} + +interface UsageDiagnosticOptions { + readonly code: string + readonly title: string + readonly rootCause: DiagnosticLines + readonly exactFix?: DiagnosticLines | undefined + readonly possibleFixes?: readonly DiagnosticLines[] | undefined + readonly details?: Record | undefined +} + +export function buildUsageDiagnostic(options: UsageDiagnosticOptions): LoggerDiagnosticInput { + return buildDiagnostic(options) +} + +interface PathStateDiagnosticOptions { + readonly code: string + readonly title: string + readonly path: string + readonly expectedKind: string + readonly actualState: string + readonly exactFix?: DiagnosticLines | undefined + readonly possibleFixes?: readonly DiagnosticLines[] | undefined + readonly details?: Record | undefined +} + +export function buildPathStateDiagnostic(options: PathStateDiagnosticOptions): LoggerDiagnosticInput { + const { + code, + title, + path, + expectedKind, + actualState, + exactFix, + possibleFixes, + details + } = options + + return buildDiagnostic({ + code, + title, + rootCause: diagnosticLines( + `tnmsc expected a ${expectedKind} at "${path}".`, + `Actual state: ${actualState}` + ), + exactFix: exactFix ?? diagnosticLines( + `Create or replace "${path}" with a valid ${expectedKind} before retrying tnmsc.` + ), + possibleFixes: possibleFixes ?? [ + diagnosticLines('Check whether the path was moved, deleted, or replaced with the wrong file type.'), + diagnosticLines('Update your configuration so tnmsc points to the intended source path.') + ], + details: { + path, + expectedKind, + actualState, + ...details ?? {} + } + }) +} + +interface PromptCompilerDiagnosticOptions { + readonly code: string + readonly title: string + readonly diagnosticText: string + readonly exactFix?: DiagnosticLines | undefined + readonly possibleFixes?: readonly DiagnosticLines[] | undefined + readonly details?: Record | undefined +} + +export function buildPromptCompilerDiagnostic(options: PromptCompilerDiagnosticOptions): LoggerDiagnosticInput { + const { + code, + title, + diagnosticText, + exactFix, + possibleFixes, + details + } = options + + const summaryLines = splitDiagnosticText(diagnosticText) + + return buildDiagnostic({ + code, + title, + rootCause: summaryLines, + exactFix: exactFix ?? diagnosticLines( + 'Fix the referenced prompt source or compiled dist file so the compiler diagnostic no longer triggers.' + ), + possibleFixes: possibleFixes ?? [ + diagnosticLines('Open the file referenced in the diagnostic and correct the reported syntax or metadata issue.'), + diagnosticLines('Rebuild the prompt output so the dist file matches the current source tree.') + ], + details: { + diagnosticText, + ...details ?? {} + } + }) +} + +export function buildProtectedDeletionDiagnostic( + operation: string, + violations: readonly ProtectedPathViolation[] +): LoggerDiagnosticInput { + const firstViolation = violations[0] + + return buildDiagnostic({ + code: 'PROTECTED_DELETION_GUARD_TRIGGERED', + title: 'Protected deletion guard blocked a destructive operation', + rootCause: diagnosticLines( + `The "${operation}" operation targeted ${violations.length} protected path(s).`, + firstViolation != null + ? `Example protected path: ${firstViolation.protectedPath}` + : 'No violation details were captured.' + ), + exactFix: diagnosticLines( + 'Remove protected inputs or reserved workspace paths from the delete plan before running tnmsc again.' + ), + possibleFixes: [ + diagnosticLines('Update cleanup declarations so they only target generated output paths.'), + diagnosticLines('Move source inputs outside of the cleanup target set if they are currently overlapping.') + ], + details: { + operation, + count: violations.length, + violations: violations.map(violation => ({ + targetPath: violation.targetPath, + protectedPath: violation.protectedPath, + protectionMode: violation.protectionMode, + source: violation.source, + reason: violation.reason + })) + } + }) +} + +export function buildUnhandledExceptionDiagnostic(context: string, error: unknown): LoggerDiagnosticInput { + const errorMessage = toErrorMessage(error) + + return buildDiagnostic({ + code: 'UNHANDLED_EXCEPTION', + title: `Unhandled exception in ${context}`, + rootCause: diagnosticLines( + `tnmsc terminated because an unhandled exception escaped the ${context} flow.`, + `Underlying error: ${errorMessage}` + ), + exactFix: diagnosticLines( + 'Inspect the error context and add the missing guard, validation, or recovery path before retrying the command.' + ), + possibleFixes: [ + diagnosticLines('Re-run the command with the same inputs after fixing the referenced file or configuration.'), + diagnosticLines('Add a focused test that reproduces this failure so the regression stays covered.') + ], + details: { + context, + errorMessage + } + }) +} + +export function partitionBufferedDiagnostics( + diagnostics: readonly LoggerDiagnosticRecord[] +): {warnings: LoggerDiagnosticRecord[], errors: LoggerDiagnosticRecord[]} { + const warnings: LoggerDiagnosticRecord[] = [] + const errors: LoggerDiagnosticRecord[] = [] + + for (const diagnostic of diagnostics) { + if (diagnostic.level === 'warn') warnings.push(diagnostic) + else errors.push(diagnostic) + } + + return {warnings, errors} +} diff --git a/sdk/src/globals.ts b/sdk/src/globals.ts new file mode 100644 index 00000000..4622248d --- /dev/null +++ b/sdk/src/globals.ts @@ -0,0 +1 @@ +export * from '@truenine/md-compiler/globals' diff --git a/sdk/src/index.test.ts b/sdk/src/index.test.ts new file mode 100644 index 00000000..0727ccea --- /dev/null +++ b/sdk/src/index.test.ts @@ -0,0 +1,11 @@ +import {describe, expect, it} from 'vitest' + +describe('library entrypoint', () => { + it('can be imported without executing the CLI runtime', async () => { + const mod = await import('./index') + + expect(typeof mod.runCli).toBe('function') + expect(typeof mod.createDefaultPluginConfig).toBe('function') + expect(typeof mod.listPrompts).toBe('function') + }) +}) diff --git a/sdk/src/index.ts b/sdk/src/index.ts new file mode 100644 index 00000000..4b8dd474 --- /dev/null +++ b/sdk/src/index.ts @@ -0,0 +1,14 @@ +export * from './Aindex' +export * from './cli-runtime' +export * from './config' +export * from './ConfigLoader' +export { + createDefaultPluginConfig +} from './plugin.config' +export * from './PluginPipeline' +export { + DEFAULT_USER_CONFIG, + PathPlaceholders +} from './plugins/plugin-core' + +export * from './prompts' diff --git a/sdk/src/inputs/AbstractInputCapability.ts b/sdk/src/inputs/AbstractInputCapability.ts new file mode 100644 index 00000000..244400da --- /dev/null +++ b/sdk/src/inputs/AbstractInputCapability.ts @@ -0,0 +1,186 @@ +import type {ParsedMarkdown} from '@truenine/md-compiler/markdown' +import type { + InputCapability, + InputCapabilityContext, + InputCollectedContext, + InputEffectContext, + InputEffectHandler, + InputEffectRegistration, + InputEffectResult, + PluginOptions, + PluginScopeRegistration, + ResolvedBasePaths, + YAMLFrontMatter +} from '@/plugins/plugin-core' + +import {spawn} from 'node:child_process' +import * as path from 'node:path' +import {createLogger} from '@truenine/logger' +import {parseMarkdown} from '@truenine/md-compiler/markdown' +import {buildDiagnostic, diagnosticLines} from '@/diagnostics' +import {PathPlaceholders} from '@/plugins/plugin-core' +import {logProtectedDeletionGuardError, ProtectedDeletionGuardError} from '@/ProtectedDeletionGuard' +import {resolveUserPath} from '@/runtime-environment' + +export abstract class AbstractInputCapability implements InputCapability { + private readonly inputEffects: InputEffectRegistration[] = [] + + private readonly registeredScopes: PluginScopeRegistration[] = [] + + readonly name: string + + readonly dependsOn?: readonly string[] + + private _log?: import('@truenine/logger').ILogger + + get log(): import('@truenine/logger').ILogger { + this._log ??= createLogger(this.name) + return this._log + } + + protected constructor(name: string, dependsOn?: readonly string[]) { + this.name = name + if (dependsOn != null) this.dependsOn = dependsOn + } + + protected registerEffect(name: string, handler: InputEffectHandler, priority: number = 0): void { + this.inputEffects.push({name, handler, priority}) + this.inputEffects.sort((a, b) => (a.priority ?? 0) - (b.priority ?? 0)) // Sort by priority (lower = earlier) + } + + async executeEffects(ctx: InputCapabilityContext, dryRun: boolean = false): Promise { + const results: InputEffectResult[] = [] + + if (this.inputEffects.length === 0) return results + + const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) + + const effectCtx: InputEffectContext = { + logger: this.log, + fs: ctx.fs, + path: ctx.path, + glob: ctx.glob, + spawn, + userConfigOptions: ctx.userConfigOptions, + workspaceDir, + aindexDir, + dryRun + } + + for (const effect of this.inputEffects) { + if (dryRun) { + this.log.trace({action: 'dryRun', type: 'inputEffect', name: effect.name}) + results.push({success: true, description: `Would execute input effect: ${effect.name}`}) + continue + } + + try { + const result = await effect.handler(effectCtx) + if (result.success) { + this.log.trace({action: 'inputEffect', name: effect.name, status: 'success', description: result.description}) + if (result.modifiedFiles != null && result.modifiedFiles.length > 0) { + this.log.debug({action: 'inputEffect', name: effect.name, modifiedFileCount: result.modifiedFiles.length}) + } + if (result.deletedFiles != null && result.deletedFiles.length > 0) { + this.log.debug({action: 'inputEffect', name: effect.name, deletedFileCount: result.deletedFiles.length}) + } + } else { + const error = result.error ?? new Error(`Input effect failed: ${effect.name}`) + throw error + } + results.push(result) + } + catch (error) { + const effectError = error instanceof Error ? error : new Error(String(error)) + this.logInputEffectFailure(effect.name, effectError) + results.push({success: false, error: effectError, description: `Input effect failed: ${effect.name}`}) + throw effectError + } + } + + return results + } + + private logInputEffectFailure(effectName: string, error: Error): void { + if (error instanceof ProtectedDeletionGuardError) { + logProtectedDeletionGuardError(this.log, error.operation, error.violations) + return + } + + this.log.error(buildDiagnostic({ + code: 'INPUT_EFFECT_FAILED', + title: `Input effect failed: ${effectName}`, + rootCause: diagnosticLines( + `The input effect "${effectName}" failed before tnmsc could finish preprocessing.`, + `Underlying error: ${error.message}` + ), + exactFix: diagnosticLines( + 'Inspect the effect inputs and fix the failing file, path, or environment condition before retrying tnmsc.' + ), + possibleFixes: [ + diagnosticLines('Re-run the command after fixing the referenced path or generated artifact.'), + diagnosticLines('Add a focused regression test if this effect should handle the failure more gracefully.') + ], + details: { + effectName, + errorMessage: error.message + } + })) + } + + hasEffects(): boolean { + return this.inputEffects.length > 0 + } + + getEffectCount(): number { + return this.inputEffects.length + } + + protected registerScope(namespace: string, values: Record): void { + this.registeredScopes.push({namespace, values}) + this.log.debug({action: 'registerScope', namespace, keys: Object.keys(values)}) + } + + getRegisteredScopes(): readonly PluginScopeRegistration[] { + return this.registeredScopes + } + + protected clearRegisteredScopes(): void { + this.registeredScopes.length = 0 + this.log.debug({action: 'clearRegisteredScopes'}) + } + + abstract collect(ctx: InputCapabilityContext): Partial | Promise> + + protected resolveBasePaths(options: Required): ResolvedBasePaths { + const workspaceDirRaw = options.workspaceDir + const workspaceDir = this.resolvePath(workspaceDirRaw, '') + + const aindexDirName = options.aindex?.dir ?? 'aindex' // 从配置读取 aindex 目录名,默认为 'aindex' + const aindexDir = path.join(workspaceDir, aindexDirName) + + return {workspaceDir, aindexDir} + } + + protected resolvePath(rawPath: string, workspaceDir: string): string { + let resolved = rawPath + + if (resolved.includes(PathPlaceholders.WORKSPACE)) resolved = resolved.replace(PathPlaceholders.WORKSPACE, workspaceDir) + + if (resolved.startsWith(PathPlaceholders.USER_HOME)) return resolveUserPath(resolved) + + return path.normalize(resolveUserPath(resolved)) + } + + protected resolveAindexPath(relativePath: string, aindexDir: string): string { + return path.join(aindexDir, relativePath) + } + + protected readAndParseMarkdown( + filePath: string, + fs: typeof import('node:fs') + ): ParsedMarkdown { + const rawContent = fs.readFileSync(filePath, 'utf8') + return parseMarkdown(rawContent) + } +} diff --git a/sdk/src/inputs/effect-md-cleanup.ts b/sdk/src/inputs/effect-md-cleanup.ts new file mode 100644 index 00000000..02a02575 --- /dev/null +++ b/sdk/src/inputs/effect-md-cleanup.ts @@ -0,0 +1,166 @@ +import type { + InputCapabilityContext, + InputCollectedContext, + InputEffectContext, + InputEffectResult +} from '../plugins/plugin-core' +import {resolveAindexProjectSeriesConfigs} from '@/aindex-project-series' +import {buildFileOperationDiagnostic} from '@/diagnostics' +import {AbstractInputCapability} from '../plugins/plugin-core' + +export interface WhitespaceCleanupEffectResult extends InputEffectResult { + readonly modifiedFiles: string[] + readonly skippedFiles: string[] +} + +export class MarkdownWhitespaceCleanupEffectInputCapability extends AbstractInputCapability { + constructor() { + super('MarkdownWhitespaceCleanupEffectInputCapability') + this.registerEffect('markdown-whitespace-cleanup', this.cleanupWhitespace.bind(this), 30) + } + + private async cleanupWhitespace(ctx: InputEffectContext): Promise { + const {fs, path, aindexDir, dryRun, logger, userConfigOptions} = ctx + + const modifiedFiles: string[] = [] + const skippedFiles: string[] = [] + const errors: {path: string, error: Error}[] = [] + const projectSeriesDirs = resolveAindexProjectSeriesConfigs(userConfigOptions) + .map(series => path.join(aindexDir, series.src)) + + const dirsToScan = [ + path.join(aindexDir, 'src'), + ...projectSeriesDirs, + path.join(aindexDir, 'dist') + ] + + for (const dir of dirsToScan) { + if (!fs.existsSync(dir)) { + logger.debug({action: 'whitespace-cleanup', message: 'Directory does not exist, skipping', dir}) + continue + } + + this.processDirectory(ctx, dir, modifiedFiles, skippedFiles, errors, dryRun ?? false) + } + + const hasErrors = errors.length > 0 + + return { + success: !hasErrors, + description: dryRun + ? `Would modify ${modifiedFiles.length} files, skip ${skippedFiles.length} files` + : `Modified ${modifiedFiles.length} files, skipped ${skippedFiles.length} files`, + modifiedFiles, + skippedFiles, + ...hasErrors && {error: new Error(`${errors.length} errors occurred during cleanup`)} + } + } + + private processDirectory( + ctx: InputEffectContext, + dir: string, + modifiedFiles: string[], + skippedFiles: string[], + errors: {path: string, error: Error}[], + dryRun: boolean + ): void { + const {fs, path, logger} = ctx + + let entries: import('node:fs').Dirent[] + try { + entries = fs.readdirSync(dir, {withFileTypes: true}) + } + catch (error) { + errors.push({path: dir, error: error as Error}) + logger.warn(buildFileOperationDiagnostic({ + code: 'WHITESPACE_CLEANUP_DIRECTORY_READ_FAILED', + title: 'Whitespace cleanup could not read a directory', + operation: 'read', + targetKind: 'cleanup directory', + path: dir, + error + })) + return + } + + for (const entry of entries) { + const entryPath = path.join(dir, entry.name) + + if (entry.isDirectory()) this.processDirectory(ctx, entryPath, modifiedFiles, skippedFiles, errors, dryRun) + else if (entry.isFile() && entry.name.endsWith('.md')) this.processMarkdownFile(ctx, entryPath, modifiedFiles, skippedFiles, errors, dryRun) + } + } + + private processMarkdownFile( + ctx: InputEffectContext, + filePath: string, + modifiedFiles: string[], + skippedFiles: string[], + errors: {path: string, error: Error}[], + dryRun: boolean + ): void { + const {fs, logger} = ctx + + try { + const originalContent = fs.readFileSync(filePath, 'utf8') + const cleanedContent = this.cleanMarkdownContent(originalContent) + + if (originalContent === cleanedContent) { + skippedFiles.push(filePath) + logger.debug({action: 'whitespace-cleanup', skipped: filePath, reason: 'no changes needed'}) + return + } + + if (dryRun) { + logger.debug({action: 'whitespace-cleanup', dryRun: true, wouldModify: filePath}) + modifiedFiles.push(filePath) + } else { + fs.writeFileSync(filePath, cleanedContent, 'utf8') + modifiedFiles.push(filePath) + logger.debug({action: 'whitespace-cleanup', modified: filePath}) + } + } + catch (error) { + errors.push({path: filePath, error: error as Error}) + logger.warn(buildFileOperationDiagnostic({ + code: 'WHITESPACE_CLEANUP_FILE_PROCESS_FAILED', + title: 'Whitespace cleanup could not process a markdown file', + operation: 'process', + targetKind: 'markdown file', + path: filePath, + error + })) + } + } + + cleanMarkdownContent(content: string): string { + const lineEnding = this.detectLineEnding(content) + const lines = content.split(/\r?\n/) + const trimmedLines = lines.map(line => line.replace(/[ \t]+$/, '')) + + const result: string[] = [] + let consecutiveBlankCount = 0 + + for (const line of trimmedLines) { + if (line === '') { + consecutiveBlankCount++ + if (consecutiveBlankCount <= 2) result.push(line) + } else { + consecutiveBlankCount = 0 + result.push(line) + } + } + + return result.join(lineEnding) + } + + detectLineEnding(content: string): '\r\n' | '\n' { + if (content.includes('\r\n')) return '\r\n' + return '\n' + } + + collect(ctx: InputCapabilityContext): Partial { + void ctx + return {} + } +} diff --git a/sdk/src/inputs/effect-orphan-cleanup.test.ts b/sdk/src/inputs/effect-orphan-cleanup.test.ts new file mode 100644 index 00000000..4e79454f --- /dev/null +++ b/sdk/src/inputs/effect-orphan-cleanup.test.ts @@ -0,0 +1,249 @@ +import type {InputCapabilityContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import {OrphanFileCleanupEffectInputCapability} from './effect-orphan-cleanup' + +const legacySourceExtension = '.cn.mdx' + +function createContext(tempWorkspace: string): InputCapabilityContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger: createLogger('OrphanFileCleanupEffectInputCapabilityTest', 'error'), + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputCapabilityContext +} + +class TestOrphanFileCleanupEffectInputCapability extends OrphanFileCleanupEffectInputCapability { + constructor(private readonly planFactory: (ctx: ReturnType & {readonly fs: typeof fs, readonly path: typeof path}) => { + filesToDelete: string[] + dirsToDelete: string[] + errors: {path: string, error: Error}[] + }) { + super() + } + + protected override buildDeletionPlan(ctx: Parameters[0]): { + filesToDelete: string[] + dirsToDelete: string[] + errors: {path: string, error: Error}[] + } { + const basePaths = this.resolveBasePaths(ctx.userConfigOptions) + return this.planFactory({...basePaths, fs: ctx.fs, path: ctx.path}) + } +} + +describe('orphan file cleanup effect', () => { + it('keeps dist command files when a matching .src.mdx source exists', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-test-')) + const srcDir = path.join(tempWorkspace, 'aindex', 'commands') + const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'commands') + const distFile = path.join(distDir, 'demo.mdx') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync(path.join(srcDir, 'demo.src.mdx'), '---\ndescription: source\n---\nSource prompt', 'utf8') + fs.writeFileSync(distFile, 'Compiled prompt', 'utf8') + + const plugin = new OrphanFileCleanupEffectInputCapability() + const [result] = await plugin.executeEffects(createContext(tempWorkspace)) + + expect(result?.success).toBe(true) + expect(fs.existsSync(distFile)).toBe(true) + expect(result?.deletedFiles ?? []).toHaveLength(0) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('deletes dist command mdx files when only a legacy cn source remains', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-legacy-test-')) + const srcDir = path.join(tempWorkspace, 'aindex', 'commands') + const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'commands') + const distFile = path.join(distDir, 'demo.mdx') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync(path.join(srcDir, `demo${legacySourceExtension}`), '---\ndescription: legacy\n---\nLegacy prompt', 'utf8') + fs.writeFileSync(distFile, 'Compiled prompt', 'utf8') + + const plugin = new OrphanFileCleanupEffectInputCapability() + const [result] = await plugin.executeEffects(createContext(tempWorkspace)) + + expect(result?.success).toBe(true) + expect(fs.existsSync(distFile)).toBe(false) + expect(result?.deletedDirs ?? []).toContain(path.join(tempWorkspace, 'aindex', 'dist', 'commands')) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('fails when an orphan cleanup candidate hits an exact protected path', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-guard-exact-')) + const safeDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'commands', 'safe.mdx') + const globalConfigPath = path.join(os.homedir(), '.aindex', '.tnmsc.json') + + try { + fs.mkdirSync(path.dirname(safeDistFile), {recursive: true}) + fs.writeFileSync(safeDistFile, 'Compiled prompt', 'utf8') + + const plugin = new TestOrphanFileCleanupEffectInputCapability(() => ({ + filesToDelete: [safeDistFile, globalConfigPath], + dirsToDelete: [], + errors: [] + })) + + await expect(plugin.executeEffects(createContext(tempWorkspace))).rejects.toThrow('Protected deletion guard blocked orphan-file-cleanup') + expect(fs.existsSync(safeDistFile)).toBe(true) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('fails without partial deletion when safe and subtree-protected candidates are mixed', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-guard-subtree-')) + const srcDir = path.join(tempWorkspace, 'aindex', 'commands') + const protectedSourceFile = path.join(srcDir, 'demo.src.mdx') + const safeDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'commands', 'safe.mdx') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(path.dirname(safeDistFile), {recursive: true}) + fs.writeFileSync(protectedSourceFile, '---\ndescription: source\n---\nSource prompt', 'utf8') + fs.writeFileSync(safeDistFile, 'Compiled prompt', 'utf8') + + const plugin = new TestOrphanFileCleanupEffectInputCapability(() => ({ + filesToDelete: [safeDistFile, protectedSourceFile], + dirsToDelete: [], + errors: [] + })) + + await expect(plugin.executeEffects(createContext(tempWorkspace))).rejects.toThrow('Protected deletion guard blocked orphan-file-cleanup') + expect(fs.existsSync(safeDistFile)).toBe(true) + expect(fs.existsSync(protectedSourceFile)).toBe(true) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('collapses nested orphan directories to the highest removable subtree root', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-collapse-test-')) + const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'commands', 'legacy', 'deep') + const orphanFile = path.join(distDir, 'demo.txt') + + try { + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync(orphanFile, 'Compiled prompt', 'utf8') + + const plugin = new OrphanFileCleanupEffectInputCapability() + const [result] = await plugin.executeEffects(createContext(tempWorkspace)) + + expect(result?.success).toBe(true) + expect(result?.deletedFiles).toEqual([]) + expect(result?.deletedDirs).toEqual([path.join(tempWorkspace, 'aindex', 'dist', 'commands')]) + expect(fs.existsSync(path.join(tempWorkspace, 'aindex', 'dist', 'commands'))).toBe(false) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('cleans orphaned ext and arch dist files using matching series source roots', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-series-')) + const extSrcFile = path.join(tempWorkspace, 'aindex', 'ext', 'plugin-a', 'agt.src.mdx') + const extDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'ext', 'plugin-a', 'agt.mdx') + const archDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'arch', 'system-a', 'agt.mdx') + + try { + fs.mkdirSync(path.dirname(extSrcFile), {recursive: true}) + fs.mkdirSync(path.dirname(extDistFile), {recursive: true}) + fs.mkdirSync(path.dirname(archDistFile), {recursive: true}) + fs.writeFileSync(extSrcFile, '---\ndescription: ext\n---\nExt prompt', 'utf8') + fs.writeFileSync(extDistFile, 'Ext dist', 'utf8') + fs.writeFileSync(archDistFile, 'Arch dist', 'utf8') + + const plugin = new OrphanFileCleanupEffectInputCapability() + const [result] = await plugin.executeEffects(createContext(tempWorkspace)) + + expect(result?.success).toBe(true) + expect(fs.existsSync(extDistFile)).toBe(true) + expect(fs.existsSync(archDistFile)).toBe(false) + expect(result?.deletedDirs ?? []).toContain(path.join(tempWorkspace, 'aindex', 'dist', 'arch')) + expect(result?.deletedFiles ?? []).not.toContain(extDistFile) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('cleans orphaned softwares dist files using the matching software source root', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-softwares-')) + const softwareSrcFile = path.join(tempWorkspace, 'aindex', 'softwares', 'tool-a', 'agt.src.mdx') + const softwareDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'softwares', 'tool-a', 'agt.mdx') + const orphanSoftwareDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'softwares', 'tool-b', 'agt.mdx') + + try { + fs.mkdirSync(path.dirname(softwareSrcFile), {recursive: true}) + fs.mkdirSync(path.dirname(softwareDistFile), {recursive: true}) + fs.mkdirSync(path.dirname(orphanSoftwareDistFile), {recursive: true}) + fs.writeFileSync(softwareSrcFile, '---\ndescription: software\n---\nSoftware prompt', 'utf8') + fs.writeFileSync(softwareDistFile, 'Software dist', 'utf8') + fs.writeFileSync(orphanSoftwareDistFile, 'Orphan software dist', 'utf8') + + const plugin = new OrphanFileCleanupEffectInputCapability() + const [result] = await plugin.executeEffects(createContext(tempWorkspace)) + + expect(result?.success).toBe(true) + expect(fs.existsSync(softwareDistFile)).toBe(true) + expect(fs.existsSync(orphanSoftwareDistFile)).toBe(false) + expect(result?.deletedDirs ?? []).toContain(path.dirname(orphanSoftwareDistFile)) + expect(result?.deletedFiles ?? []).not.toContain(softwareDistFile) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('cleans orphaned subagent dist files using the configured subagents source root', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-subagents-')) + const subAgentSrcFile = path.join(tempWorkspace, 'aindex', 'subagents', 'qa', 'boot.src.mdx') + const subAgentDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'subagents', 'qa', 'boot.mdx') + const orphanSubAgentDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'subagents', 'ops', 'boot.mdx') + + try { + fs.mkdirSync(path.dirname(subAgentSrcFile), {recursive: true}) + fs.mkdirSync(path.dirname(subAgentDistFile), {recursive: true}) + fs.mkdirSync(path.dirname(orphanSubAgentDistFile), {recursive: true}) + fs.writeFileSync(subAgentSrcFile, '---\ndescription: subagent\n---\nSubagent prompt', 'utf8') + fs.writeFileSync(subAgentDistFile, 'Subagent dist', 'utf8') + fs.writeFileSync(orphanSubAgentDistFile, 'Orphan subagent dist', 'utf8') + + const plugin = new OrphanFileCleanupEffectInputCapability() + const [result] = await plugin.executeEffects(createContext(tempWorkspace)) + + expect(result?.success).toBe(true) + expect(fs.existsSync(subAgentDistFile)).toBe(true) + expect(fs.existsSync(orphanSubAgentDistFile)).toBe(false) + expect(result?.deletedDirs ?? []).toContain(path.dirname(orphanSubAgentDistFile)) + expect(result?.deletedFiles ?? []).not.toContain(subAgentDistFile) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/effect-orphan-cleanup.ts b/sdk/src/inputs/effect-orphan-cleanup.ts new file mode 100644 index 00000000..a0356d58 --- /dev/null +++ b/sdk/src/inputs/effect-orphan-cleanup.ts @@ -0,0 +1,308 @@ +import type { + AindexPromptTreeDirectoryPairKey, + InputCapabilityContext, + InputCollectedContext, + InputEffectContext, + InputEffectResult, + PluginOptions +} from '../plugins/plugin-core' +import {buildFileOperationDiagnostic} from '@/diagnostics' +import {compactDeletionTargets} from '../cleanup/delete-targets' +import {deleteTargets} from '../core/desk-paths' +import { + AbstractInputCapability, + AINDEX_PROMPT_TREE_DIRECTORY_PAIR_KEYS, + SourcePromptFileExtensions +} from '../plugins/plugin-core' +import { + collectConfiguredAindexInputRules, + createProtectedDeletionGuard, + partitionDeletionTargets, + ProtectedDeletionGuardError +} from '../ProtectedDeletionGuard' + +export interface OrphanCleanupEffectResult extends InputEffectResult { + readonly deletedFiles: string[] + readonly deletedDirs: string[] +} + +interface OrphanCleanupDirectoryConfig { + readonly key: AindexPromptTreeDirectoryPairKey + readonly srcPath: string + readonly distPath: string +} + +interface OrphanCleanupPlan { + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly errors: readonly {path: string, error: Error}[] +} + +export class OrphanFileCleanupEffectInputCapability extends AbstractInputCapability { + constructor() { + super('OrphanFileCleanupEffectInputCapability') + this.registerEffect('orphan-file-cleanup', this.cleanupOrphanFiles.bind(this), 20) + } + + protected buildProtectedDeletionGuard(ctx: InputEffectContext): ReturnType { + return createProtectedDeletionGuard({ + workspaceDir: ctx.workspaceDir, + aindexDir: ctx.aindexDir, + includeReservedWorkspaceContentRoots: false, + rules: [ + ...collectConfiguredAindexInputRules(ctx.userConfigOptions, ctx.aindexDir, { + workspaceDir: ctx.workspaceDir + }), + ...(ctx.userConfigOptions.cleanupProtection?.rules ?? []).map(rule => ({ + path: rule.path, + protectionMode: rule.protectionMode, + reason: rule.reason ?? 'configured cleanup protection rule', + source: 'configured-cleanup-protection', + matcher: rule.matcher ?? 'path' + })) + ] + }) + } + + protected buildDeletionPlan( + ctx: InputEffectContext, + directoryConfigs: readonly OrphanCleanupDirectoryConfig[] + ): OrphanCleanupPlan { + const filesToDelete: string[] = [] + const dirsToDelete: string[] = [] + const errors: {path: string, error: Error}[] = [] + + for (const directoryConfig of directoryConfigs) { + const distSubDirPath = ctx.path.join(ctx.aindexDir, directoryConfig.distPath) + if (!ctx.fs.existsSync(distSubDirPath)) continue + if (!ctx.fs.statSync(distSubDirPath).isDirectory()) continue + const subDirWillBeEmpty = this.collectDirectoryPlan(ctx, distSubDirPath, directoryConfig, filesToDelete, dirsToDelete, errors) + if (subDirWillBeEmpty) dirsToDelete.push(distSubDirPath) + } + + return {filesToDelete, dirsToDelete, errors} + } + + protected resolveDirectoryConfigs(options: Required): readonly OrphanCleanupDirectoryConfig[] { + return AINDEX_PROMPT_TREE_DIRECTORY_PAIR_KEYS.map(key => ({ + key, + srcPath: options.aindex[key].src, + distPath: options.aindex[key].dist + })) + } + + private async cleanupOrphanFiles(ctx: InputEffectContext): Promise { + const {fs, path, aindexDir, logger, userConfigOptions, dryRun} = ctx + const distDir = path.join(aindexDir, 'dist') + + if (!fs.existsSync(distDir)) { + logger.debug({action: 'orphan-cleanup', message: 'dist/ directory does not exist, skipping', distDir}) + return { + success: true, + description: 'dist/ directory does not exist, nothing to clean', + deletedFiles: [], + deletedDirs: [] + } + } + + const plan = this.buildDeletionPlan(ctx, this.resolveDirectoryConfigs(userConfigOptions)) + + const guard = this.buildProtectedDeletionGuard(ctx) + const filePartition = partitionDeletionTargets(plan.filesToDelete, guard) + const dirPartition = partitionDeletionTargets(plan.dirsToDelete, guard) + const compactedPlan = compactDeletionTargets(filePartition.safePaths, dirPartition.safePaths) + const violations = [...filePartition.violations, ...dirPartition.violations].sort((a, b) => a.targetPath.localeCompare(b.targetPath)) + + if (violations.length > 0) { + return { + success: false, + description: `Protected deletion guard blocked orphan cleanup for ${violations.length} path(s)`, + deletedFiles: [], + deletedDirs: [], + error: new ProtectedDeletionGuardError('orphan-file-cleanup', violations) + } + } + + if (dryRun) { + return { + success: true, + description: `Would delete ${compactedPlan.files.length} files and ${compactedPlan.dirs.length} directories`, + deletedFiles: [...compactedPlan.files], + deletedDirs: [...compactedPlan.dirs] + } + } + + const deleteErrors: {path: string, error: Error}[] = [...plan.errors] + logger.debug('orphan cleanup delete execution started', { + filesToDelete: compactedPlan.files.length, + dirsToDelete: compactedPlan.dirs.length + }) + + const result = await deleteTargets({ + files: compactedPlan.files, + dirs: compactedPlan.dirs + }) + + for (const fileError of result.fileErrors) { + const normalizedError = fileError.error instanceof Error ? fileError.error : new Error(String(fileError.error)) + deleteErrors.push({path: fileError.path, error: normalizedError}) + logger.warn(buildFileOperationDiagnostic({ + code: 'ORPHAN_CLEANUP_FILE_DELETE_FAILED', + title: 'Orphan cleanup could not delete a file', + operation: 'delete', + targetKind: 'orphan file', + path: fileError.path, + error: normalizedError + })) + } + + for (const dirError of result.dirErrors) { + const normalizedError = dirError.error instanceof Error ? dirError.error : new Error(String(dirError.error)) + deleteErrors.push({path: dirError.path, error: normalizedError}) + logger.warn(buildFileOperationDiagnostic({ + code: 'ORPHAN_CLEANUP_DIRECTORY_DELETE_FAILED', + title: 'Orphan cleanup could not delete a directory', + operation: 'delete', + targetKind: 'orphan directory', + path: dirError.path, + error: normalizedError + })) + } + + logger.debug('orphan cleanup delete execution complete', { + deletedFiles: result.deletedFiles.length, + deletedDirs: result.deletedDirs.length, + errors: deleteErrors.length + }) + + const hasErrors = deleteErrors.length > 0 + return { + success: !hasErrors, + description: `Deleted ${result.deletedFiles.length} files and ${result.deletedDirs.length} directories`, + deletedFiles: [...result.deletedFiles], + deletedDirs: [...result.deletedDirs], + ...hasErrors && {error: new Error(`${deleteErrors.length} errors occurred during cleanup`)} + } + } + + protected collectDirectoryPlan( + ctx: InputEffectContext, + distDirPath: string, + directoryConfig: OrphanCleanupDirectoryConfig, + filesToDelete: string[], + dirsToDelete: string[], + errors: {path: string, error: Error}[] + ): boolean { + const {fs, path, aindexDir, logger} = ctx + + let entries: import('node:fs').Dirent[] + try { + entries = fs.readdirSync(distDirPath, {withFileTypes: true}) + } + catch (error) { + errors.push({path: distDirPath, error: error as Error}) + logger.warn(buildFileOperationDiagnostic({ + code: 'ORPHAN_CLEANUP_DIRECTORY_READ_FAILED', + title: 'Orphan cleanup could not read a directory', + operation: 'read', + targetKind: 'dist cleanup directory', + path: distDirPath, + error + })) + return false + } + + let hasRetainedEntries = false + + for (const entry of entries) { + const entryPath = path.join(distDirPath, entry.name) + + if (entry.isDirectory()) { + const childWillBeEmpty = this.collectDirectoryPlan( + ctx, + entryPath, + directoryConfig, + filesToDelete, + dirsToDelete, + errors + ) + if (childWillBeEmpty) dirsToDelete.push(entryPath) + else hasRetainedEntries = true + continue + } + + if (!entry.isFile()) { + hasRetainedEntries = true + continue + } + + const isOrphan = this.isOrphanFile(ctx, entryPath, directoryConfig, aindexDir) + if (isOrphan) filesToDelete.push(entryPath) + else hasRetainedEntries = true + } + + return !hasRetainedEntries + } + + private isOrphanFile( + ctx: InputEffectContext, + distFilePath: string, + directoryConfig: OrphanCleanupDirectoryConfig, + aindexDir: string + ): boolean { + const {fs, path} = ctx + + const fileName = path.basename(distFilePath) + const isMdxFile = fileName.endsWith('.mdx') + + const distTypeDir = path.join(aindexDir, directoryConfig.distPath) + const relativeFromType = path.relative(distTypeDir, distFilePath) + const relativeDir = path.dirname(relativeFromType) + const baseName = fileName.replace(/\.mdx$/, '') + + if (!isMdxFile) return !fs.existsSync(path.join(aindexDir, directoryConfig.srcPath, relativeFromType)) + + const possibleSrcPaths = this.getPossibleSourcePaths( + path, + aindexDir, + directoryConfig.key, + directoryConfig.srcPath, + baseName, + relativeDir + ) + return !possibleSrcPaths.some(candidatePath => fs.existsSync(candidatePath)) + } + + private getPossibleSourcePaths( + nodePath: typeof import('node:path'), + aindexDir: string, + directoryKey: AindexPromptTreeDirectoryPairKey, + srcPath: string, + baseName: string, + relativeDir: string + ): string[] { + if (directoryKey === 'skills') { + const skillParts = relativeDir === '.' ? [baseName] : relativeDir.split(nodePath.sep) + const skillName = skillParts[0] ?? baseName + const remainingPath = relativeDir === '.' ? '' : relativeDir.slice(skillName.length + 1) + + if (remainingPath !== '') { + return SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, skillName, remainingPath, `${baseName}${extension}`)) + } + + return [ + ...SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, skillName, `SKILL${extension}`)), + ...SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, skillName, `skill${extension}`)) + ] + } + + return relativeDir === '.' + ? SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, `${baseName}${extension}`)) + : SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, relativeDir, `${baseName}${extension}`)) + } + + collect(ctx: InputCapabilityContext): Partial { + void ctx + return {} + } +} diff --git a/sdk/src/inputs/effect-skill-sync.test.ts b/sdk/src/inputs/effect-skill-sync.test.ts new file mode 100644 index 00000000..2bc9ab11 --- /dev/null +++ b/sdk/src/inputs/effect-skill-sync.test.ts @@ -0,0 +1,115 @@ +import type {InputCapabilityContext, PluginOptions} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import {SkillDistCleanupEffectInputCapability} from './effect-skill-sync' + +function createContext( + tempWorkspace: string, + overrides?: Partial +): InputCapabilityContext { + const options = mergeConfig({workspaceDir: tempWorkspace}, overrides ?? {}) + + return { + logger: createLogger('SkillDistCleanupEffectInputCapabilityTest', 'error'), + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputCapabilityContext +} + +describe('skill dist cleanup effect', () => { + it('deletes non-mdx mirrored files while preserving compiled mdx files', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-dist-cleanup-test-')) + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + const nestedLegacyDir = path.join(distSkillDir, 'legacy') + + try { + fs.mkdirSync(nestedLegacyDir, {recursive: true}) + fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), 'Compiled skill', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'guide.mdx'), 'Compiled guide', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'guide.src.mdx'), 'Stale source mirror', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'notes.md'), 'Legacy note', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'demo.kts'), 'println("legacy")', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'mcp.json'), '{"mcpServers":{}}', 'utf8') + fs.writeFileSync(path.join(nestedLegacyDir, 'diagram.svg'), '', 'utf8') + + const plugin = new SkillDistCleanupEffectInputCapability() + const [result] = await plugin.executeEffects(createContext(tempWorkspace)) + + expect(result?.success).toBe(true) + expect(fs.existsSync(path.join(distSkillDir, 'skill.mdx'))).toBe(true) + expect(fs.existsSync(path.join(distSkillDir, 'guide.mdx'))).toBe(true) + expect(fs.existsSync(path.join(distSkillDir, 'guide.src.mdx'))).toBe(false) + expect(fs.existsSync(path.join(distSkillDir, 'notes.md'))).toBe(false) + expect(fs.existsSync(path.join(distSkillDir, 'demo.kts'))).toBe(false) + expect(fs.existsSync(path.join(distSkillDir, 'mcp.json'))).toBe(false) + expect(fs.existsSync(path.join(nestedLegacyDir, 'diagram.svg'))).toBe(false) + expect(fs.existsSync(nestedLegacyDir)).toBe(false) + expect(result?.deletedFiles).toEqual(expect.arrayContaining([ + path.join(distSkillDir, 'guide.src.mdx'), + path.join(distSkillDir, 'notes.md'), + path.join(distSkillDir, 'demo.kts'), + path.join(distSkillDir, 'mcp.json') + ])) + expect(result?.deletedDirs).toContain(nestedLegacyDir) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('respects configured skills dist paths instead of hardcoded defaults', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-dist-cleanup-config-test-')) + const distSkillDir = path.join(tempWorkspace, 'aindex', 'compiled', 'skills', 'demo') + + try { + fs.mkdirSync(distSkillDir, {recursive: true}) + fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), 'Compiled skill', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'legacy.txt'), 'Legacy attachment', 'utf8') + + const plugin = new SkillDistCleanupEffectInputCapability() + const [result] = await plugin.executeEffects(createContext(tempWorkspace, { + aindex: { + skills: {src: 'abilities', dist: 'compiled/skills'} + } + })) + + expect(result?.success).toBe(true) + expect(fs.existsSync(path.join(distSkillDir, 'skill.mdx'))).toBe(true) + expect(fs.existsSync(path.join(distSkillDir, 'legacy.txt'))).toBe(false) + expect(result?.deletedFiles).toContain(path.join(distSkillDir, 'legacy.txt')) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('collapses nested removable skill dist directories to the highest safe root', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-dist-cleanup-collapse-test-')) + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + const nestedLegacyDir = path.join(distSkillDir, 'legacy', 'deep') + + try { + fs.mkdirSync(nestedLegacyDir, {recursive: true}) + fs.writeFileSync(path.join(nestedLegacyDir, 'diagram.svg'), '', 'utf8') + + const plugin = new SkillDistCleanupEffectInputCapability() + const [result] = await plugin.executeEffects(createContext(tempWorkspace)) + + expect(result?.success).toBe(true) + expect(result?.deletedFiles).toEqual([]) + expect(result?.deletedDirs).toEqual([path.join(tempWorkspace, 'aindex', 'dist', 'skills')]) + expect(fs.existsSync(path.join(tempWorkspace, 'aindex', 'dist', 'skills'))).toBe(false) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/effect-skill-sync.ts b/sdk/src/inputs/effect-skill-sync.ts new file mode 100644 index 00000000..61c2d89b --- /dev/null +++ b/sdk/src/inputs/effect-skill-sync.ts @@ -0,0 +1,181 @@ +import type {InputCapabilityContext, InputCollectedContext, InputEffectContext, InputEffectResult} from '../plugins/plugin-core' +import {buildFileOperationDiagnostic} from '@/diagnostics' +import {compactDeletionTargets} from '../cleanup/delete-targets' +import {deleteTargets} from '../core/desk-paths' +import {AbstractInputCapability, hasSourcePromptExtension} from '../plugins/plugin-core' + +export interface SkillDistCleanupEffectResult extends InputEffectResult { + readonly deletedFiles: string[] + readonly deletedDirs: string[] +} + +interface SkillDistCleanupPlan { + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly errors: readonly {path: string, error: Error}[] +} + +export class SkillDistCleanupEffectInputCapability extends AbstractInputCapability { + constructor() { + super('SkillDistCleanupEffectInputCapability') + this.registerEffect('skill-dist-cleanup', this.cleanupDistSkillArtifacts.bind(this), 10) + } + + private async cleanupDistSkillArtifacts(ctx: InputEffectContext): Promise { + const {fs, logger, userConfigOptions, aindexDir, dryRun} = ctx + const srcSkillsDir = this.resolveAindexPath(userConfigOptions.aindex.skills.src, aindexDir) + const distSkillsDir = this.resolveAindexPath(userConfigOptions.aindex.skills.dist, aindexDir) + + if (!fs.existsSync(distSkillsDir)) { + logger.debug({action: 'skill-dist-cleanup', message: 'dist skills directory does not exist, skipping', srcSkillsDir, distSkillsDir}) + return { + success: true, + description: 'dist skills directory does not exist, nothing to clean', + deletedFiles: [], + deletedDirs: [] + } + } + + const plan = this.buildCleanupPlan(ctx, distSkillsDir) + const compactedPlan = compactDeletionTargets(plan.filesToDelete, plan.dirsToDelete) + + if (dryRun) { + return { + success: true, + description: `Would delete ${compactedPlan.files.length} files and ${compactedPlan.dirs.length} directories`, + deletedFiles: [...compactedPlan.files], + deletedDirs: [...compactedPlan.dirs] + } + } + + const deleteErrors: {path: string, error: Error}[] = [...plan.errors] + logger.debug('skill dist cleanup delete execution started', { + filesToDelete: compactedPlan.files.length, + dirsToDelete: compactedPlan.dirs.length + }) + + const result = await deleteTargets({ + files: compactedPlan.files, + dirs: compactedPlan.dirs + }) + + for (const fileError of result.fileErrors) { + const normalizedError = fileError.error instanceof Error ? fileError.error : new Error(String(fileError.error)) + deleteErrors.push({path: fileError.path, error: normalizedError}) + logger.warn(buildFileOperationDiagnostic({ + code: 'SKILL_DIST_CLEANUP_FILE_DELETE_FAILED', + title: 'Skill dist cleanup could not delete a file', + operation: 'delete', + targetKind: 'skill dist file', + path: fileError.path, + error: normalizedError + })) + } + + for (const dirError of result.dirErrors) { + const normalizedError = dirError.error instanceof Error ? dirError.error : new Error(String(dirError.error)) + deleteErrors.push({path: dirError.path, error: normalizedError}) + logger.warn(buildFileOperationDiagnostic({ + code: 'SKILL_DIST_CLEANUP_DIRECTORY_DELETE_FAILED', + title: 'Skill dist cleanup could not delete a directory', + operation: 'delete', + targetKind: 'skill dist directory', + path: dirError.path, + error: normalizedError + })) + } + + logger.debug('skill dist cleanup delete execution complete', { + deletedFiles: result.deletedFiles.length, + deletedDirs: result.deletedDirs.length, + errors: deleteErrors.length + }) + + const hasErrors = deleteErrors.length > 0 + return { + success: !hasErrors, + description: `Deleted ${result.deletedFiles.length} files and ${result.deletedDirs.length} directories`, + deletedFiles: [...result.deletedFiles], + deletedDirs: [...result.deletedDirs], + ...hasErrors && {error: new Error(`${deleteErrors.length} errors occurred during cleanup`)} + } + } + + private buildCleanupPlan(ctx: InputEffectContext, distSkillsDir: string): SkillDistCleanupPlan { + const filesToDelete: string[] = [] + const dirsToDelete: string[] = [] + const errors: {path: string, error: Error}[] = [] + + const rootWillBeEmpty = this.collectCleanupPlan(ctx, distSkillsDir, filesToDelete, dirsToDelete, errors) + if (rootWillBeEmpty) dirsToDelete.push(distSkillsDir) + + return {filesToDelete, dirsToDelete, errors} + } + + private collectCleanupPlan( + ctx: InputEffectContext, + currentDir: string, + filesToDelete: string[], + dirsToDelete: string[], + errors: {path: string, error: Error}[] + ): boolean { + const {fs, path, logger} = ctx + + let entries: import('node:fs').Dirent[] + try { + entries = fs.readdirSync(currentDir, {withFileTypes: true}) + } + catch (error) { + errors.push({path: currentDir, error: error as Error}) + logger.warn(buildFileOperationDiagnostic({ + code: 'SKILL_DIST_CLEANUP_DIRECTORY_READ_FAILED', + title: 'Skill dist cleanup could not read a directory', + operation: 'read', + targetKind: 'skill dist directory', + path: currentDir, + error + })) + return false + } + + let hasRetainedEntries = false + + for (const entry of entries) { + const entryPath = path.join(currentDir, entry.name) + + if (entry.isDirectory()) { + const childWillBeEmpty = this.collectCleanupPlan(ctx, entryPath, filesToDelete, dirsToDelete, errors) + if (childWillBeEmpty) dirsToDelete.push(entryPath) + else hasRetainedEntries = true + continue + } + + if (!entry.isFile()) { + hasRetainedEntries = true + continue + } + + if (this.shouldRetainCompiledSkillFile(entry.name)) { + hasRetainedEntries = true + continue + } + + filesToDelete.push(entryPath) + } + + return !hasRetainedEntries + } + + private shouldRetainCompiledSkillFile(fileName: string): boolean { + return fileName.endsWith('.mdx') && !hasSourcePromptExtension(fileName) + } + + collect(ctx: InputCapabilityContext): Partial { + void ctx + return {} + } +} + +export type SkillSyncEffectResult = SkillDistCleanupEffectResult + +export class SkillNonSrcFileSyncEffectInputCapability extends SkillDistCleanupEffectInputCapability {} diff --git a/sdk/src/inputs/index.ts b/sdk/src/inputs/index.ts new file mode 100644 index 00000000..c16fe985 --- /dev/null +++ b/sdk/src/inputs/index.ts @@ -0,0 +1,59 @@ +export { + MarkdownWhitespaceCleanupEffectInputCapability +} from './effect-md-cleanup' +export { + OrphanFileCleanupEffectInputCapability +} from './effect-orphan-cleanup' +export { + SkillDistCleanupEffectInputCapability, + SkillNonSrcFileSyncEffectInputCapability +} from './effect-skill-sync' // Effect Input Plugins (按优先级排序: 10, 20, 30) + +export { + SkillInputCapability +} from './input-agentskills' +export { + AindexInputCapability +} from './input-aindex' +export { + CommandInputCapability +} from './input-command' +export { + EditorConfigInputCapability +} from './input-editorconfig' +export { + GitExcludeInputCapability +} from './input-git-exclude' +export { + GitIgnoreInputCapability +} from './input-gitignore' +export { + GlobalMemoryInputCapability +} from './input-global-memory' +export { + JetBrainsConfigInputCapability +} from './input-jetbrains-config' +export { + ProjectPromptInputCapability +} from './input-project-prompt' +export { + ReadmeMdInputCapability +} from './input-readme' +export { + RuleInputCapability +} from './input-rule' +export { + AIAgentIgnoreInputCapability +} from './input-shared-ignore' +export { + SubAgentInputCapability +} from './input-subagent' +export { + VSCodeConfigInputCapability +} from './input-vscode-config' +export { + WorkspaceInputCapability +} from './input-workspace' // Regular Input Plugins +export { + ZedConfigInputCapability +} from './input-zed-config' diff --git a/sdk/src/inputs/input-agentskills-export-fallback.test.ts b/sdk/src/inputs/input-agentskills-export-fallback.test.ts new file mode 100644 index 00000000..b19ace0d --- /dev/null +++ b/sdk/src/inputs/input-agentskills-export-fallback.test.ts @@ -0,0 +1,80 @@ +import type {ILogger, InputCapabilityContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {afterEach, describe, expect, it, vi} from 'vitest' +import {mergeConfig} from '../config' + +vi.mock('@truenine/md-compiler', () => ({ + mdxToMd: async (content: string) => ({ + content: content.replace(/export default\s*\{[\s\S]*?\}\s*/u, '').trim(), + metadata: { + fields: {}, + source: 'export' + } + }) +})) + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createContext(tempWorkspace: string, logger: ILogger): InputCapabilityContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger, + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputCapabilityContext +} + +afterEach(() => vi.resetModules()) + +describe('skill input plugin export fallback', () => { + it('uses export-default metadata when compiled metadata fields are empty', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-export-fallback-')) + const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + + try { + fs.mkdirSync(srcSkillDir, {recursive: true}) + fs.mkdirSync(distSkillDir, {recursive: true}) + fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), `export default { + description: 'source export description', +} + +Source skill +`, 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), `export default { + description: 'dist export description', +} + +Dist skill +`, 'utf8') + + const {SkillInputCapability} = await import('./input-agentskills') + const plugin = new SkillInputCapability() + const result = await plugin.collect(createContext(tempWorkspace, createMockLogger())) + const [skill] = result.skills ?? [] + + expect(result.skills?.length ?? 0).toBe(1) + expect(skill?.yamlFrontMatter?.description).toBe('dist export description') + expect(skill?.content).toContain('Dist skill') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/input-agentskills-types.ts b/sdk/src/inputs/input-agentskills-types.ts new file mode 100644 index 00000000..24483bab --- /dev/null +++ b/sdk/src/inputs/input-agentskills-types.ts @@ -0,0 +1,10 @@ +/** + * Types for SkillInputCapability resource processing + */ + +import type {SkillChildDoc, SkillResource} from '../plugins/plugin-core' + +export interface ResourceScanResult { + readonly childDocs: SkillChildDoc[] + readonly resources: SkillResource[] +} diff --git a/sdk/src/inputs/input-agentskills.test.ts b/sdk/src/inputs/input-agentskills.test.ts new file mode 100644 index 00000000..f5881e15 --- /dev/null +++ b/sdk/src/inputs/input-agentskills.test.ts @@ -0,0 +1,179 @@ +import type {ILogger, InputCapabilityContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {SkillInputCapability} from './input-agentskills' + +function createMockLogger(warnings: string[] = [], errors: string[] = []): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: diagnostic => warnings.push(diagnostic.code), + error: diagnostic => errors.push(diagnostic.code), + fatal: diagnostic => errors.push(diagnostic.code) + } as ILogger +} + +function createContext(tempWorkspace: string, logger: ILogger): InputCapabilityContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger, + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputCapabilityContext +} + +describe('skill input plugin', () => { + it('reads compiled mdx from dist and non-mdx resources from src', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-test-')) + const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + + try { + fs.mkdirSync(srcSkillDir, {recursive: true}) + fs.mkdirSync(distSkillDir, {recursive: true}) + fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src skill\n---\nSkill source', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'guide.src.mdx'), '---\ndescription: src guide\n---\nGuide source', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'notes.md'), 'Source notes', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'demo.kts'), 'println("source")', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'mcp.json'), '{"mcpServers":{"demo":{"command":"demo"}}}', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\ndescription: dist skill\n---\nexport const x = 1\n\nSkill dist', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'guide.mdx'), '---\ndescription: dist guide\n---\nGuide dist', 'utf8') + + const plugin = new SkillInputCapability() + const result = await plugin.collect(createContext(tempWorkspace, createMockLogger())) + const [skill] = result.skills ?? [] + + expect(result.skills?.length ?? 0).toBe(1) + expect(skill?.skillName).toBe('demo') + expect(skill?.content).toContain('Skill dist') + expect(skill?.content).not.toContain('Skill source') + expect(skill?.content).not.toContain('export const x = 1') + expect(skill?.yamlFrontMatter?.name).toBe('demo') + expect(skill?.yamlFrontMatter?.description).toBe('dist skill') + expect(skill?.childDocs?.map(childDoc => childDoc.relativePath)).toEqual(['guide.mdx']) + expect(skill?.childDocs?.[0]?.content).toContain('Guide dist') + expect(skill?.childDocs?.[0]?.content).not.toContain('Guide source') + expect(new Set(skill?.resources?.map(resource => resource.relativePath) ?? [])).toEqual(new Set(['demo.kts', 'notes.md'])) + expect(skill?.resources?.find(resource => resource.relativePath === 'notes.md')?.content).toBe('Source notes') + expect(skill?.resources?.find(resource => resource.relativePath === 'demo.kts')?.content).toContain('println("source")') + expect(skill?.mcpConfig?.mcpServers.demo?.command).toBe('demo') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('uses src resources even when a legacy dist copy still exists', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-resource-test-')) + const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + + try { + fs.mkdirSync(srcSkillDir, {recursive: true}) + fs.mkdirSync(distSkillDir, {recursive: true}) + fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src skill\n---\nSkill source', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'notes.md'), 'Source notes', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\ndescription: dist skill\n---\nSkill dist', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'notes.md'), 'Legacy dist notes', 'utf8') + + const plugin = new SkillInputCapability() + const result = await plugin.collect(createContext(tempWorkspace, createMockLogger())) + const [skill] = result.skills ?? [] + + expect(skill?.resources?.find(resource => resource.relativePath === 'notes.md')?.content).toBe('Source notes') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('fails hard when child docs are missing compiled dist pairs', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-missing-child-test-')) + const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + + try { + fs.mkdirSync(srcSkillDir, {recursive: true}) + fs.mkdirSync(distSkillDir, {recursive: true}) + fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src skill\n---\nSkill source', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'guide.src.mdx'), '---\ndescription: src guide\n---\nGuide source', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\ndescription: dist skill\n---\nSkill dist', 'utf8') + + const plugin = new SkillInputCapability() + await expect(plugin.collect(createContext(tempWorkspace, createMockLogger()))).rejects.toThrow('Missing compiled dist prompt') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('fails hard when the main skill exists only in src', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-main-missing-dist-test-')) + const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') + + try { + fs.mkdirSync(srcSkillDir, {recursive: true}) + fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src only skill\n---\nSkill source', 'utf8') + + const plugin = new SkillInputCapability() + await expect(plugin.collect(createContext(tempWorkspace, createMockLogger()))).rejects.toThrow('Missing compiled dist prompt') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('rejects workspace as an unsupported skill scope', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-workspace-scope-test-')) + const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + + try { + fs.mkdirSync(srcSkillDir, {recursive: true}) + fs.mkdirSync(distSkillDir, {recursive: true}) + fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src skill\n---\nSkill source', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\ndescription: dist skill\nscope: workspace\n---\nSkill dist', 'utf8') + + const plugin = new SkillInputCapability() + await expect(plugin.collect(createContext(tempWorkspace, createMockLogger()))).rejects.toThrow('Field "scope" must be "project" or "global"') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('warns and ignores authored skill name metadata', async () => { + const warnings: string[] = [] + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-name-warning-test-')) + const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + + try { + fs.mkdirSync(srcSkillDir, {recursive: true}) + fs.mkdirSync(distSkillDir, {recursive: true}) + fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\nname: custom-demo\ndescription: src skill\n---\nSkill source', 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\nname: custom-demo\ndescription: dist skill\n---\nSkill dist', 'utf8') + + const plugin = new SkillInputCapability() + const result = await plugin.collect(createContext(tempWorkspace, createMockLogger(warnings))) + const [skill] = result.skills ?? [] + + expect(skill?.skillName).toBe('demo') + expect(skill?.yamlFrontMatter?.name).toBe('demo') + expect(skill?.yamlFrontMatter?.description).toBe('dist skill') + expect(warnings).toContain('SKILL_NAME_IGNORED') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/input-agentskills.ts b/sdk/src/inputs/input-agentskills.ts new file mode 100644 index 00000000..129a430c --- /dev/null +++ b/sdk/src/inputs/input-agentskills.ts @@ -0,0 +1,836 @@ +import type {ParsedMarkdown} from '@truenine/md-compiler/markdown' +import type {Dirent} from 'node:fs' +import type { + ILogger, + InputCapabilityContext, + InputCollectedContext, + McpServerConfig, + SkillChildDoc, + SkillMcpConfig, + SkillPrompt, + SkillResource, + SkillResourceEncoding, + SkillYAMLFrontMatter +} from '../plugins/plugin-core' +import type {ResourceScanResult} from './input-agentskills-types' + +import {Buffer} from 'node:buffer' +import * as nodePath from 'node:path' +import {transformMdxReferencesToMd} from '@truenine/md-compiler/markdown' +import { + buildConfigDiagnostic, + buildDiagnostic, + buildFileOperationDiagnostic, + buildPathStateDiagnostic, + buildPromptCompilerDiagnostic, + diagnosticLines +} from '@/diagnostics' +import { + AbstractInputCapability, + createLocalizedPromptReader, + FilePathKind, + hasSourcePromptExtension, + PromptKind, + SourceLocaleExtensions, + validateSkillMetadata +} from '../plugins/plugin-core' +import {assertNoResidualModuleSyntax, MissingCompiledPromptError} from '../plugins/plugin-core/DistPromptGuards' +import {readPromptArtifact} from '../plugins/plugin-core/PromptArtifactCache' +import { + formatPromptCompilerDiagnostic, + resolveSourcePathForDistFile +} from '../plugins/plugin-core/PromptCompilerDiagnostics' + +export * from './input-agentskills-types' // Re-export from types file + +interface WritableSkillMetadata { + name?: string + description?: string + displayName?: string + keywords?: string[] + author?: string + version?: string + allowTools?: string[] + [key: string]: unknown +} + +const EXPORT_DEFAULT_REGEX = /export\s+default\s*\{([\s\S]*?)\}/u +const DESCRIPTION_REGEX = /description\s*:\s*['"`]([^'"`]+)['"`]/u +const NAME_REGEX = /name\s*:\s*['"`]([^'"`]+)['"`]/u +const DISPLAY_NAME_REGEX = /displayName\s*:\s*['"`]([^'"`]+)['"`]/u +const KEYWORDS_REGEX = /keywords\s*:\s*\[([^\]]+)\]/u +const AUTHOR_REGEX = /author\s*:\s*['"`]([^'"`]+)['"`]/u +const VERSION_REGEX = /version\s*:\s*['"`]([^'"`]+)['"`]/u + +function extractSkillMetadataFromExport(content: string): WritableSkillMetadata { + const metadata: WritableSkillMetadata = {} + + const exportMatch = EXPORT_DEFAULT_REGEX.exec(content) + if (exportMatch?.[1] == null) return metadata + + const objectContent = exportMatch[1] + + const descriptionMatch = DESCRIPTION_REGEX.exec(objectContent) + if (descriptionMatch?.[1] != null) metadata.description = descriptionMatch[1] + + const nameMatch = NAME_REGEX.exec(objectContent) + if (nameMatch?.[1] != null) metadata.name = nameMatch[1] + + const displayNameMatch = DISPLAY_NAME_REGEX.exec(objectContent) + if (displayNameMatch?.[1] != null) metadata.displayName = displayNameMatch[1] + + const keywordsMatch = KEYWORDS_REGEX.exec(objectContent) + if (keywordsMatch?.[1] != null) { + metadata.keywords = keywordsMatch[1] + .split(',') + .map(k => k.trim().replaceAll(/['"]/gu, '')) + .filter(k => k.length > 0) + } + + const authorMatch = AUTHOR_REGEX.exec(objectContent) + if (authorMatch?.[1] != null) metadata.author = authorMatch[1] + + const versionMatch = VERSION_REGEX.exec(objectContent) + if (versionMatch?.[1] != null) metadata.version = versionMatch[1] + + return metadata +} + +function mergeDefinedSkillMetadata( + ...sources: (Record | undefined)[] +): WritableSkillMetadata { + const merged: WritableSkillMetadata = {} + + for (const source of sources) { + if (source == null) continue + + for (const [key, value] of Object.entries(source)) { + if (value !== void 0) (merged as Record)[key] = value + } + } + + return merged +} + +function warnIgnoredSkillName(options: { + readonly logger: ILogger + readonly warnedDerivedNames?: Set + readonly sourcePath: string + readonly authoredName: string + readonly skillName: string +}): void { + const {logger, warnedDerivedNames, sourcePath, authoredName, skillName} = options + if (warnedDerivedNames?.has(sourcePath) === true) return + + warnedDerivedNames?.add(sourcePath) + logger.warn(buildConfigDiagnostic({ + code: 'SKILL_NAME_IGNORED', + title: 'Skill authored name is ignored', + reason: diagnosticLines( + `tnmsc ignores the authored skill name "${authoredName}" in favor of the directory-derived name "${skillName}".` + ), + configPath: sourcePath, + exactFix: diagnosticLines( + 'Remove the `name` field from the skill front matter or exported metadata.', + 'Rename the skill directory if you need a different skill name.' + ), + details: { + authoredName, + derivedName: skillName + } + })) +} + +const MIME_TYPES: Record = { // MIME types for resources + '.ts': 'text/typescript', + '.tsx': 'text/typescript', + '.js': 'text/javascript', + '.jsx': 'text/javascript', + '.json': 'application/json', + '.py': 'text/x-python', + '.java': 'text/x-java', + '.kt': 'text/x-kotlin', + '.go': 'text/x-go', + '.rs': 'text/x-rust', + '.c': 'text/x-c', + '.cpp': 'text/x-c++', + '.cs': 'text/x-csharp', + '.rb': 'text/x-ruby', + '.php': 'text/x-php', + '.swift': 'text/x-swift', + '.scala': 'text/x-scala', + '.sql': 'application/sql', + '.xml': 'application/xml', + '.yaml': 'text/yaml', + '.yml': 'text/yaml', + '.toml': 'text/toml', + '.csv': 'text/csv', + '.graphql': 'application/graphql', + '.txt': 'text/plain', + '.pdf': 'application/pdf', + '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + '.xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + '.html': 'text/html', + '.css': 'text/css', + '.svg': 'image/svg+xml', + '.png': 'image/png', + '.jpg': 'image/jpeg', + '.jpeg': 'image/jpeg', + '.gif': 'image/gif', + '.webp': 'image/webp', + '.ico': 'image/x-icon', + '.bmp': 'image/bmp' +} + +const SKILL_RESOURCE_BINARY_EXTENSIONS = new Set([ // Binary extensions + '.png', + '.jpg', + '.jpeg', + '.gif', + '.webp', + '.ico', + '.bmp', + '.tiff', + '.svg', + '.exe', + '.dll', + '.so', + '.dylib', + '.bin', + '.wasm', + '.class', + '.jar', + '.war', + '.pyd', + '.pyc', + '.pyo', + '.zip', + '.tar', + '.gz', + '.bz2', + '.7z', + '.rar', + '.ttf', + '.otf', + '.woff', + '.woff2', + '.eot', + '.db', + '.sqlite', + '.sqlite3', + '.pdf', + '.docx', + '.doc', + '.xlsx', + '.xls', + '.pptx', + '.ppt', + '.odt', + '.ods', + '.odp' +]) + +function isBinaryResourceExtension(ext: string): boolean { + return SKILL_RESOURCE_BINARY_EXTENSIONS.has(ext.toLowerCase()) +} + +function getMimeType(ext: string): string | undefined { + return MIME_TYPES[ext.toLowerCase()] +} + +function pathJoin(...segments: string[]): string { + const joined = nodePath.join(...segments) + return joined.replaceAll('\\', '/') +} + +interface ResourceProcessorContext { + readonly fs: typeof import('node:fs') + readonly logger: ILogger + readonly skillDir: string + readonly scanMode: 'distChildDocs' | 'srcResources' + readonly sourceSkillDir?: string + readonly globalScope?: InputCapabilityContext['globalScope'] +} + +class ResourceProcessor { + private readonly ctx: ResourceProcessorContext + + constructor(ctx: ResourceProcessorContext) { + this.ctx = ctx + } + + async processDirectory(entry: Dirent, currentRelativePath: string, filePath: string): Promise { + const relativePath = currentRelativePath + ? `${currentRelativePath}/${entry.name}` + : entry.name + return this.scanSkillDirectoryAsync(filePath, relativePath) + } + + async processFile(entry: Dirent, currentRelativePath: string, filePath: string): Promise { + const relativePath = currentRelativePath + ? `${currentRelativePath}/${entry.name}` + : entry.name + + if (this.ctx.scanMode === 'distChildDocs') { + if (currentRelativePath === '' && entry.name === 'skill.mdx') return {childDocs: [], resources: []} + if (hasSourcePromptExtension(entry.name) || !entry.name.endsWith('.mdx')) return {childDocs: [], resources: []} + + const childDoc = await this.processChildDoc(relativePath, filePath) + return {childDocs: childDoc ? [childDoc] : [], resources: []} + } + + if (currentRelativePath === '' && entry.name === 'mcp.json') return {childDocs: [], resources: []} + if (hasSourcePromptExtension(entry.name) || entry.name.endsWith('.mdx')) return {childDocs: [], resources: []} + + const resource = this.processResourceFile(entry.name, relativePath, filePath) + return {childDocs: [], resources: resource ? [resource] : []} + } + + private async processChildDoc(relativePath: string, filePath: string): Promise { + try { + const artifact = await readPromptArtifact(filePath, { + mode: 'dist', + globalScope: this.ctx.globalScope + }) + const compiledContent = transformMdxReferencesToMd(artifact.content) + assertNoResidualModuleSyntax(compiledContent, filePath) + + return { + type: PromptKind.SkillChildDoc, + content: compiledContent, + length: compiledContent.length, + filePathKind: FilePathKind.Relative, + markdownAst: artifact.parsed.markdownAst, + markdownContents: artifact.parsed.markdownContents, + ...artifact.parsed.rawFrontMatter != null && {rawFrontMatter: artifact.parsed.rawFrontMatter}, + relativePath, + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: this.ctx.skillDir, + getDirectoryName: () => nodePath.dirname(relativePath), + getAbsolutePath: () => filePath + } + } + } + catch (error) { + this.ctx.logger.error(buildPromptCompilerDiagnostic({ + code: 'SKILL_CHILD_DOC_COMPILE_FAILED', + title: 'Failed to compile skill child doc', + diagnosticText: formatPromptCompilerDiagnostic(error, { + operation: 'Failed to compile skill child doc.', + promptKind: 'skill-child-doc', + logicalName: `${nodePath.basename(this.ctx.skillDir)}/${relativePath.replace(/\.mdx$/u, '')}`, + distPath: filePath, + srcPath: resolveSourcePathForDistFile(nodePath, filePath, { + distRootDir: this.ctx.skillDir, + srcRootDir: this.ctx.sourceSkillDir + }) + }), + details: { + skillDir: this.ctx.skillDir, + relativePath, + filePath + } + })) + throw error + } + } + + private processResourceFile(fileName: string, relativePath: string, filePath: string): SkillResource | null { + const ext = nodePath.extname(fileName) + + try { + const {content, encoding, length} = this.readFileContent(filePath, ext) + const mimeType = getMimeType(ext) + + const resource: SkillResource = { + type: PromptKind.SkillResource, + extension: ext, + fileName, + relativePath, + sourcePath: filePath, + content, + encoding, + length, + ...mimeType != null && {mimeType} + } + + return resource + } + catch (e) { + this.ctx.logger.warn(buildFileOperationDiagnostic({ + code: 'SKILL_RESOURCE_READ_FAILED', + title: 'Failed to read skill resource file', + operation: 'read', + targetKind: 'skill resource file', + path: filePath, + error: e, + details: { + relativePath, + fileName, + skillDir: this.ctx.skillDir + } + })) + return null + } + } + + private readFileContent(filePath: string, ext: string): {content: string, encoding: SkillResourceEncoding, length: number} { + if (isBinaryResourceExtension(ext)) { + const buffer = this.ctx.fs.readFileSync(filePath) + return { + content: buffer.toString('base64'), + encoding: 'base64', + length: buffer.length + } + } + + const content = this.ctx.fs.readFileSync(filePath, 'utf8') + return { + content, + encoding: 'text', + length: Buffer.from(content, 'utf8').length + } + } + + async scanSkillDirectoryAsync(currentDir: string, currentRelativePath: string = ''): Promise { + const childDocs: SkillChildDoc[] = [] + const resources: SkillResource[] = [] + + let entries: Dirent[] + try { + entries = this.ctx.fs.readdirSync(currentDir, {withFileTypes: true}) + } + catch (e) { + this.ctx.logger.warn(buildFileOperationDiagnostic({ + code: 'SKILL_DIRECTORY_SCAN_FAILED', + title: 'Failed to scan skill directory', + operation: 'scan', + targetKind: 'skill directory', + path: currentDir, + error: e, + details: { + skillDir: this.ctx.skillDir, + scanMode: this.ctx.scanMode + } + })) + return {childDocs, resources} + } + + for (const entry of entries) { + const filePath = pathJoin(currentDir, entry.name) + + if (entry.isDirectory()) { + const subResult = await this.processDirectory(entry, currentRelativePath, filePath) + childDocs.push(...subResult.childDocs) + resources.push(...subResult.resources) + continue + } + + if (!entry.isFile()) continue + + const fileResult = await this.processFile(entry, currentRelativePath, filePath) + childDocs.push(...fileResult.childDocs) + resources.push(...fileResult.resources) + } + + return {childDocs, resources} + } +} + +function collectExpectedCompiledChildDocPaths( + skillDir: string, + fs: typeof import('node:fs'), + logger: ILogger, + currentRelativePath: string = '' +): string[] { + const expectedPaths: string[] = [] + const currentDir = currentRelativePath === '' + ? skillDir + : pathJoin(skillDir, currentRelativePath) + + let entries: Dirent[] + try { + entries = fs.readdirSync(currentDir, {withFileTypes: true}) + } + catch (error) { + logger.warn(buildFileOperationDiagnostic({ + code: 'SKILL_SOURCE_CHILD_SCAN_FAILED', + title: 'Failed to scan skill source child docs', + operation: 'scan', + targetKind: 'skill source child doc directory', + path: currentDir, + error + })) + return expectedPaths + } + + for (const entry of entries) { + const entryRelativePath = currentRelativePath + ? `${currentRelativePath}/${entry.name}` + : entry.name + + if (entry.isDirectory()) { + expectedPaths.push(...collectExpectedCompiledChildDocPaths(skillDir, fs, logger, entryRelativePath)) + continue + } + + if (!entry.isFile() || !hasSourcePromptExtension(entry.name)) continue + if (currentRelativePath === '' && entry.name === 'skill.src.mdx') continue + + expectedPaths.push(entryRelativePath.replace(/\.src\.mdx$/u, '.mdx')) + } + + return expectedPaths +} + +function assertCompiledChildDocsExist( + skillName: string, + skillSrcDir: string, + skillDistDir: string, + fs: typeof import('node:fs'), + logger: ILogger +): void { + if (!fs.existsSync(skillSrcDir)) return + + for (const relativePath of collectExpectedCompiledChildDocPaths(skillSrcDir, fs, logger)) { + const distPath = nodePath.join(skillDistDir, relativePath) + if (fs.existsSync(distPath)) continue + + throw new MissingCompiledPromptError({ + kind: 'skill child doc', + name: `${skillName}/${relativePath}`, + sourcePath: nodePath.join(skillSrcDir, relativePath.replace(/\.mdx$/u, '.src.mdx')), + expectedDistPath: distPath + }) + } +} + +function readMcpConfig( + skillDir: string, + fs: typeof import('node:fs'), + logger: ILogger +): SkillMcpConfig | undefined { + const mcpJsonPath = nodePath.join(skillDir, 'mcp.json') + + if (!fs.existsSync(mcpJsonPath)) return void 0 + + if (!fs.statSync(mcpJsonPath).isFile()) { + logger.warn(buildPathStateDiagnostic({ + code: 'SKILL_MCP_CONFIG_NOT_FILE', + title: 'Skill MCP config path is not a file', + path: mcpJsonPath, + expectedKind: 'mcp.json file', + actualState: 'path exists but is not a regular file', + details: { + skillDir + } + })) + return void 0 + } + + try { + const rawContent = fs.readFileSync(mcpJsonPath, 'utf8') + const parsed = JSON.parse(rawContent) as {mcpServers?: Record} + + if (parsed.mcpServers == null || typeof parsed.mcpServers !== 'object') { + logger.warn(buildConfigDiagnostic({ + code: 'SKILL_MCP_CONFIG_INVALID', + title: 'Skill MCP config is missing mcpServers', + reason: diagnosticLines( + `The skill MCP config at "${mcpJsonPath}" does not contain a top-level mcpServers object.` + ), + configPath: mcpJsonPath, + exactFix: diagnosticLines( + 'Add a top-level `mcpServers` object to mcp.json before retrying tnmsc.' + ), + details: { + skillDir + } + })) + return void 0 + } + + return { + type: PromptKind.SkillMcpConfig, + mcpServers: parsed.mcpServers, + rawContent + } + } + catch (e) { + logger.warn(buildConfigDiagnostic({ + code: 'SKILL_MCP_CONFIG_PARSE_FAILED', + title: 'Failed to parse skill MCP config', + reason: diagnosticLines( + `tnmsc could not parse the MCP config file at "${mcpJsonPath}".`, + `Underlying error: ${e instanceof Error ? e.message : String(e)}` + ), + configPath: mcpJsonPath, + exactFix: diagnosticLines('Fix the JSON syntax in mcp.json and rerun tnmsc.'), + details: { + skillDir, + errorMessage: e instanceof Error ? e.message : String(e) + } + })) + return void 0 + } +} + +async function createSkillPrompt( + content: string, + _locale: 'zh' | 'en', + name: string, + skillDir: string, + skillAbsoluteDir: string, + sourceSkillAbsoluteDir: string, + ctx: InputCapabilityContext, + mcpConfig?: SkillMcpConfig, + childDocs: SkillPrompt['childDocs'] = [], + resources: SkillPrompt['resources'] = [], + seriName?: string | string[] | null, + compiledMetadata?: Record, + warnedDerivedNames?: Set +): Promise { + const {logger, fs} = ctx + + const distFilePath = nodePath.join(skillAbsoluteDir, 'skill.mdx') + const sourceFilePath = fs.existsSync(nodePath.join(sourceSkillAbsoluteDir, 'skill.src.mdx')) + ? nodePath.join(sourceSkillAbsoluteDir, 'skill.src.mdx') + : distFilePath + let rawContent = content + let parsed: ParsedMarkdown | undefined, + distMetadata: Record | undefined + + if (fs.existsSync(distFilePath)) { + const artifact = await readPromptArtifact(distFilePath, { + mode: 'dist', + globalScope: ctx.globalScope + }) + rawContent = artifact.rawMdx + parsed = artifact.parsed as ParsedMarkdown + content = transformMdxReferencesToMd(artifact.content) + assertNoResidualModuleSyntax(content, distFilePath) + distMetadata = artifact.metadata + } + + const exportMetadata = mergeDefinedSkillMetadata( + extractSkillMetadataFromExport(rawContent), + compiledMetadata, + distMetadata + ) // Merge fallback export parsing with compiled metadata so empty metadata objects do not mask valid fields + + const authoredNames = new Set() + const yamlName = parsed?.yamlFrontMatter?.name + if (typeof yamlName === 'string' && yamlName.trim().length > 0) authoredNames.add(yamlName) + const exportedName = exportMetadata.name + if (typeof exportedName === 'string' && exportedName.trim().length > 0) authoredNames.add(exportedName) + + for (const authoredName of authoredNames) { + warnIgnoredSkillName({ + logger, + sourcePath: sourceFilePath, + authoredName, + skillName: name, + ...warnedDerivedNames != null && {warnedDerivedNames} + }) + } + + const finalDescription = parsed?.yamlFrontMatter?.description ?? exportMetadata?.description + + if (finalDescription == null || finalDescription.trim().length === 0) { // Strict validation: description must exist and not be empty + logger.error(buildDiagnostic({ + code: 'SKILL_VALIDATION_FAILED', + title: 'Skill description is required', + rootCause: diagnosticLines( + `The skill "${name}" does not provide a non-empty description in its compiled metadata or front matter.` + ), + exactFix: diagnosticLines( + 'Add a non-empty description field to the skill front matter or exported metadata and rebuild the skill.' + ), + possibleFixes: [ + diagnosticLines('Set `description` in `SKILL.md` front matter.'), + diagnosticLines('If you export metadata from code, ensure the exported description is non-empty.') + ], + details: { + skill: name, + skillDir, + yamlDescription: parsed?.yamlFrontMatter?.description, + exportDescription: exportMetadata?.description + } + })) + throw new Error(`Skill "${name}" validation failed: description is required and cannot be empty`) + } + + const mergedFrontMatter: SkillYAMLFrontMatter = { + ...exportMetadata, + ...parsed?.yamlFrontMatter ?? {}, + name, + description: finalDescription + } as SkillYAMLFrontMatter + + const validation = validateSkillMetadata(mergedFrontMatter as Record, distFilePath) + if (!validation.valid) throw new Error(validation.errors.join('\n')) + + return { + type: PromptKind.Skill, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + skillName: name, + yamlFrontMatter: mergedFrontMatter, + markdownAst: parsed?.markdownAst, + markdownContents: parsed?.markdownContents ?? [], + dir: { + pathKind: FilePathKind.Relative, + path: name, + basePath: skillDir, + getDirectoryName: () => name, + getAbsolutePath: () => nodePath.join(skillDir, name) + }, + ...parsed?.rawFrontMatter != null && {rawFrontMatter: parsed.rawFrontMatter}, + ...mcpConfig != null && {mcpConfig}, + ...childDocs != null && childDocs.length > 0 && {childDocs}, + ...resources != null && resources.length > 0 && {resources}, + ...seriName != null && {seriName} + } as SkillPrompt +} + +export class SkillInputCapability extends AbstractInputCapability { + constructor() { + super('SkillInputCapability') + } + + readMcpConfig( + skillDir: string, + fs: typeof import('node:fs'), + logger: ILogger + ): SkillMcpConfig | undefined { + return readMcpConfig(skillDir, fs, logger) + } + + async scanSkillDirectory( + skillDir: string, + fs: typeof import('node:fs'), + logger: ILogger, + currentRelativePath: string = '', + scanMode: 'distChildDocs' | 'srcResources' = 'srcResources', + globalScope?: InputCapabilityContext['globalScope'], + sourceSkillDir?: string + ): Promise { + const processor = new ResourceProcessor({ + fs, + logger, + skillDir, + scanMode, + ...globalScope != null && {globalScope}, + ...sourceSkillDir != null && {sourceSkillDir} + }) + return processor.scanSkillDirectoryAsync(skillDir, currentRelativePath) + } + + async collect(ctx: InputCapabilityContext): Promise> { + const {userConfigOptions: options, logger, fs, path: pathModule, globalScope} = ctx + const {aindexDir} = this.resolveBasePaths(options) + + const srcSkillDir = this.resolveAindexPath(options.aindex.skills.src, aindexDir) + const distSkillDir = this.resolveAindexPath(options.aindex.skills.dist, aindexDir) + + const flatSkills: SkillPrompt[] = [] + const reader = createLocalizedPromptReader(fs, pathModule, logger, globalScope) + const warnedDerivedNames = new Set() + const skillArtifactCache = new Map() + + const getSkillArtifacts = async (name: string): Promise<{ + readonly childDocs: SkillChildDoc[] + readonly resources: SkillResource[] + readonly mcpConfig?: SkillMcpConfig + }> => { + const cached = skillArtifactCache.get(name) + if (cached != null) return cached + + const skillSrcDir = pathModule.join(srcSkillDir, name) + const skillDistDir = pathModule.join(distSkillDir, name) + + const childDocs = fs.existsSync(skillDistDir) + ? (await this.scanSkillDirectory(skillDistDir, fs, logger, '', 'distChildDocs', globalScope, skillSrcDir)).childDocs + : [] + const resources = fs.existsSync(skillSrcDir) + ? (await this.scanSkillDirectory(skillSrcDir, fs, logger, '', 'srcResources', globalScope)).resources + : [] + const mcpConfig = readMcpConfig(skillSrcDir, fs, logger) + + assertCompiledChildDocsExist(name, skillSrcDir, skillDistDir, fs, logger) + + const artifacts = { + childDocs, + resources, + ...mcpConfig != null && {mcpConfig} + } + + skillArtifactCache.set(name, artifacts) + return artifacts + } + + const {prompts: localizedSkills, errors} = await reader.readDirectoryStructure( + srcSkillDir, + distSkillDir, + { + kind: PromptKind.Skill, + entryFileName: 'skill', + localeExtensions: SourceLocaleExtensions, + hydrateSourceContents: false, + isDirectoryStructure: true, + createPrompt: async (content, locale, name, metadata) => { + const skillDistDir = pathModule.join(distSkillDir, name) + const {childDocs, resources, mcpConfig} = await getSkillArtifacts(name) + + return createSkillPrompt( + content, + locale, + name, + distSkillDir, + skillDistDir, + pathModule.join(srcSkillDir, name), + ctx, + mcpConfig, + childDocs, + resources, + void 0, + metadata, + warnedDerivedNames + ) + } + } + ) + + for (const error of errors) { + logger.warn(buildFileOperationDiagnostic({ + code: 'SKILL_PROMPT_READ_FAILED', + title: 'Failed to read skill prompt', + operation: error.phase === 'scan' ? 'scan' : 'read', + targetKind: 'skill prompt', + path: error.path, + error: error.error, + details: { + phase: error.phase + } + })) + } + + if (errors.length > 0) throw new Error(errors.map(error => error.error instanceof Error ? error.error.message : String(error.error)).join('\n')) + + for (const localized of localizedSkills) { + const prompt = localized.dist?.prompt + if (prompt != null) flatSkills.push(prompt) + } + + return { + skills: flatSkills + } + } +} diff --git a/sdk/src/inputs/input-aindex.test.ts b/sdk/src/inputs/input-aindex.test.ts new file mode 100644 index 00000000..38f41bf9 --- /dev/null +++ b/sdk/src/inputs/input-aindex.test.ts @@ -0,0 +1,187 @@ +import type {InputCapabilityContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it, vi} from 'vitest' +import {mergeConfig} from '../config' +import {AindexInputCapability} from './input-aindex' + +function createLoggerMock(): { + readonly logger: InputCapabilityContext['logger'] + readonly error: ReturnType + readonly warn: ReturnType +} { + const error = vi.fn() + const warn = vi.fn() + + return { + logger: { + error, + warn, + info: vi.fn(), + debug: vi.fn(), + trace: vi.fn(), + fatal: vi.fn() + }, + error, + warn + } +} + +function createContext( + tempWorkspace: string, + logger: InputCapabilityContext['logger'] +): InputCapabilityContext { + return { + logger, + fs, + path, + glob, + userConfigOptions: mergeConfig({workspaceDir: tempWorkspace}), + dependencyContext: {} + } as InputCapabilityContext +} + +function createAindexProject( + tempWorkspace: string, + projectName: string, + series: 'app' | 'ext' | 'arch' | 'softwares' = 'app' +): { + readonly configDir: string +} { + const distProjectDir = path.join(tempWorkspace, 'aindex', 'dist', series, projectName) + const configDir = path.join(tempWorkspace, 'aindex', series, projectName) + + fs.mkdirSync(distProjectDir, {recursive: true}) + fs.mkdirSync(configDir, {recursive: true}) + + return {configDir} +} + +describe('aindex input capability project config loading', () => { + it('loads project.json5 using JSON5 features without any jsonc fallback', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-aindex-project-json5-')) + const {logger, warn} = createLoggerMock() + + try { + const {configDir} = createAindexProject(tempWorkspace, 'project-a') + fs.writeFileSync(path.join(configDir, 'project.json5'), [ + '{', + ' // JSON5 comment support', + ' includeSeries: [\'alpha\'],', + ' subSeries: {', + ' skills: [\'ship-*\'],', + ' },', + '}', + '' + ].join('\n'), 'utf8') + + const result = await new AindexInputCapability().collect(createContext(tempWorkspace, logger)) + const project = result.workspace?.projects[0] + + expect(project?.name).toBe('project-a') + expect(project?.projectConfig).toEqual({ + includeSeries: ['alpha'], + subSeries: { + skills: ['ship-*'] + } + }) + expect(warn).not.toHaveBeenCalled() + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('ignores legacy project.jsonc after the hard cut', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-aindex-project-jsonc-legacy-')) + const {logger, warn} = createLoggerMock() + + try { + const {configDir} = createAindexProject(tempWorkspace, 'project-b') + fs.writeFileSync(path.join(configDir, 'project.jsonc'), '{"includeSeries":["legacy"]}\n', 'utf8') + + const result = await new AindexInputCapability().collect(createContext(tempWorkspace, logger)) + const project = result.workspace?.projects[0] + + expect(project?.name).toBe('project-b') + expect(project?.projectConfig).toBeUndefined() + expect(warn).not.toHaveBeenCalled() + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('emits JSON5 diagnostics for invalid project.json5 syntax', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-aindex-project-json5-invalid-')) + const {logger, warn} = createLoggerMock() + + try { + const {configDir} = createAindexProject(tempWorkspace, 'project-c') + fs.writeFileSync(path.join(configDir, 'project.json5'), '{includeSeries: [\'broken\',]} trailing', 'utf8') + + const result = await new AindexInputCapability().collect(createContext(tempWorkspace, logger)) + const project = result.workspace?.projects[0] + const diagnostic = warn.mock.calls[0]?.[0] + + expect(project?.name).toBe('project-c') + expect(project?.projectConfig).toBeUndefined() + expect(warn).toHaveBeenCalledTimes(1) + expect(diagnostic).toEqual(expect.objectContaining({ + code: 'AINDEX_PROJECT_JSON5_INVALID', + title: 'Failed to parse project.json5 for project-c', + exactFix: ['Fix the JSON5 syntax in project.json5 and rerun tnmsc.'] + })) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('collects app, ext, arch, and softwares projects with series-aware metadata', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-aindex-project-series-')) + const {logger} = createLoggerMock() + + try { + createAindexProject(tempWorkspace, 'project-a', 'app') + createAindexProject(tempWorkspace, 'plugin-a', 'ext') + createAindexProject(tempWorkspace, 'system-a', 'arch') + createAindexProject(tempWorkspace, 'tool-a', 'softwares') + + const result = await new AindexInputCapability().collect(createContext(tempWorkspace, logger)) + const projects = result.workspace?.projects ?? [] + + expect(projects.map(project => `${project.promptSeries}:${project.name}`)).toEqual([ + 'app:project-a', + 'ext:plugin-a', + 'arch:system-a', + 'softwares:tool-a' + ]) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('fails fast when app, ext, arch, and softwares reuse the same project name', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-aindex-project-conflict-')) + const {logger, error} = createLoggerMock() + + try { + createAindexProject(tempWorkspace, 'project-a', 'app') + createAindexProject(tempWorkspace, 'project-a', 'softwares') + + await expect(new AindexInputCapability().collect(createContext(tempWorkspace, logger))) + .rejects + .toThrow('Aindex project series name conflict') + expect(error).toHaveBeenCalledWith(expect.objectContaining({ + code: 'AINDEX_PROJECT_SERIES_NAME_CONFLICT' + })) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/input-aindex.ts b/sdk/src/inputs/input-aindex.ts new file mode 100644 index 00000000..048d7216 --- /dev/null +++ b/sdk/src/inputs/input-aindex.ts @@ -0,0 +1,270 @@ +import type {InputCapabilityContext, InputCollectedContext, Project, ProjectConfig, Workspace} from '../plugins/plugin-core' +import type {AindexProjectSeriesConfig} from '@/aindex-project-series' + +import JSON5 from 'json5' +import { + collectAindexProjectSeriesProjectNameConflicts, + resolveAindexProjectSeriesConfigs +} from '@/aindex-project-series' +import { + buildConfigDiagnostic, + buildFileOperationDiagnostic, + diagnosticLines +} from '@/diagnostics' +import {AbstractInputCapability, FilePathKind} from '../plugins/plugin-core' + +export class AindexInputCapability extends AbstractInputCapability { + private static readonly projectConfigFileName = 'project.json5' + private static readonly conflictingProjectSeriesCode = 'AINDEX_PROJECT_SERIES_NAME_CONFLICT' + + constructor() { + super('AindexInputCapability') + } + + private loadProjectConfig( + projectName: string, + aindexDir: string, + srcPath: string, + fs: InputCapabilityContext['fs'], + path: InputCapabilityContext['path'], + logger: InputCapabilityContext['logger'] + ): ProjectConfig | undefined { + const configPath = path.join( + aindexDir, + srcPath, + projectName, + AindexInputCapability.projectConfigFileName + ) + if (!fs.existsSync(configPath)) return void 0 + + try { + const raw = fs.readFileSync(configPath, 'utf8') + + try { + return JSON5.parse(raw) + } + catch (e) { + logger.warn(buildConfigDiagnostic({ + code: 'AINDEX_PROJECT_JSON5_INVALID', + title: `Failed to parse ${AindexInputCapability.projectConfigFileName} for ${projectName}`, + reason: diagnosticLines( + `tnmsc could not parse the ${AindexInputCapability.projectConfigFileName} file for "${projectName}".`, + `Underlying error: ${e instanceof Error ? e.message : String(e)}` + ), + configPath, + exactFix: diagnosticLines( + `Fix the JSON5 syntax in ${AindexInputCapability.projectConfigFileName} and rerun tnmsc.` + ), + details: { + projectName, + errorMessage: e instanceof Error ? e.message : String(e) + } + })) + return void 0 + } + } + catch (e) { + logger.warn(buildConfigDiagnostic({ + code: 'AINDEX_PROJECT_JSON5_READ_FAILED', + title: `Failed to load ${AindexInputCapability.projectConfigFileName} for ${projectName}`, + reason: diagnosticLines( + `tnmsc could not read the ${AindexInputCapability.projectConfigFileName} file for "${projectName}".`, + `Underlying error: ${e instanceof Error ? e.message : String(e)}` + ), + configPath, + exactFix: diagnosticLines( + `Ensure ${AindexInputCapability.projectConfigFileName} exists, is readable, and contains valid JSON5.` + ), + details: { + projectName, + errorMessage: e instanceof Error ? e.message : String(e) + } + })) + return void 0 + } + } + + private async scanSeriesProjects( + ctx: InputCapabilityContext, + workspaceDir: string, + aindexDir: string, + aindexName: string, + projectNameSource: readonly AindexProjectSeriesConfig[] + ): Promise { + const {logger, fs, path} = ctx + const projectGroups = await Promise.all(projectNameSource.map(async series => { + const aindexProjectsDir = this.resolveAindexPath(series.dist, aindexDir) + const distDirStat = await fs.promises.stat(aindexProjectsDir).catch(() => void 0) + if (!(distDirStat?.isDirectory() === true)) return [] + + try { + const entries = (await fs.promises.readdir(aindexProjectsDir, {withFileTypes: true})) + .filter(entry => entry.isDirectory()) + .sort((a, b) => a.name.localeCompare(b.name)) + const projects: Project[] = [] + + for (const entry of entries) { + const isTheAindex = entry.name === aindexName + const projectConfig = this.loadProjectConfig(entry.name, aindexDir, series.src, fs, path, logger) + + projects.push({ + name: entry.name, + promptSeries: series.name, + ...isTheAindex && {isPromptSourceProject: true}, + ...projectConfig != null && {projectConfig}, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: entry.name, + basePath: workspaceDir, + getDirectoryName: () => entry.name, + getAbsolutePath: () => path.resolve(workspaceDir, entry.name) + } + }) + } + + return projects + } + catch (e) { + logger.error(buildFileOperationDiagnostic({ + code: 'AINDEX_PROJECT_DIRECTORY_SCAN_FAILED', + title: `Failed to scan aindex ${series.name} projects directory`, + operation: 'scan', + targetKind: `aindex ${series.name} projects directory`, + path: aindexProjectsDir, + error: e + })) + + return [] + } + })) + + return projectGroups.flat() + } + + private loadFallbackProjectConfig( + projectName: string, + aindexDir: string, + ctx: Pick + ): ProjectConfig | undefined { + for (const series of resolveAindexProjectSeriesConfigs(ctx.userConfigOptions)) { + const config = this.loadProjectConfig(projectName, aindexDir, series.src, ctx.fs, ctx.path, ctx.logger) + if (config != null) return config + } + + return void 0 + } + + private assertNoCrossSeriesProjectNameConflicts( + ctx: Pick, + aindexDir: string, + projectSeries: readonly AindexProjectSeriesConfig[] + ): void { + const {logger, fs, path} = ctx + const projectRefs = projectSeries.flatMap(series => { + const seriesSourceDir = path.join(aindexDir, series.src) + if (!(fs.existsSync(seriesSourceDir) && fs.statSync(seriesSourceDir).isDirectory())) return [] + + return fs + .readdirSync(seriesSourceDir, {withFileTypes: true}) + .filter(entry => entry.isDirectory()) + .map(entry => ({ + projectName: entry.name, + seriesName: series.name, + seriesDir: path.join(seriesSourceDir, entry.name) + })) + }) + const conflicts = collectAindexProjectSeriesProjectNameConflicts(projectRefs) + if (conflicts.length === 0) return + + logger.error(buildConfigDiagnostic({ + code: AindexInputCapability.conflictingProjectSeriesCode, + title: 'Project names must be unique across app, ext, arch, and softwares', + reason: diagnosticLines( + 'tnmsc maps project-scoped outputs back to workspace project names, so app/ext/arch/softwares cannot reuse the same directory name.', + `Conflicting project names: ${conflicts.map(conflict => conflict.projectName).join(', ')}` + ), + exactFix: diagnosticLines( + 'Rename the conflicting project directory in one of the app/ext/arch/softwares source trees and rerun tnmsc.' + ), + possibleFixes: conflicts.map(conflict => diagnosticLines( + `"${conflict.projectName}" is currently declared in: ${conflict.refs.map(ref => `${ref.seriesName} (${ref.seriesDir})`).join(', ')}` + )), + details: { + aindexDir, + conflicts: conflicts.map(conflict => ({ + projectName: conflict.projectName, + refs: conflict.refs.map(ref => ({ + seriesName: ref.seriesName, + seriesDir: ref.seriesDir + })) + })) + } + })) + + throw new Error('Aindex project series name conflict') + } + + async collect(ctx: InputCapabilityContext): Promise> { + const {userConfigOptions: options, logger, fs, path} = ctx + const {workspaceDir, aindexDir} = this.resolveBasePaths(options) + const aindexName = path.basename(aindexDir) + const projectSeries = resolveAindexProjectSeriesConfigs(options) + + // Project outputs intentionally collapse to /, so + // app/ext/arch/softwares must never reuse the same project directory name. + this.assertNoCrossSeriesProjectNameConflicts(ctx, aindexDir, projectSeries) + + const aindexProjects = await this.scanSeriesProjects(ctx, workspaceDir, aindexDir, aindexName, projectSeries) + + if (aindexProjects.length === 0 && fs.existsSync(workspaceDir) && fs.statSync(workspaceDir).isDirectory()) { + logger.debug('no projects in dist/app, dist/ext, or dist/arch; falling back to workspace scan', {workspaceDir}) + try { + const entries = fs + .readdirSync(workspaceDir, {withFileTypes: true}) + .filter(entry => entry.isDirectory()) + .sort((a, b) => a.name.localeCompare(b.name)) + + for (const entry of entries) { + if (entry.name.startsWith('.')) continue + + const isTheAindex = entry.name === aindexName + const projectConfig = this.loadFallbackProjectConfig(entry.name, aindexDir, ctx) + + aindexProjects.push({ + name: entry.name, + ...isTheAindex && {isPromptSourceProject: true}, + ...projectConfig != null && {projectConfig}, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: entry.name, + basePath: workspaceDir, + getDirectoryName: () => entry.name, + getAbsolutePath: () => path.resolve(workspaceDir, entry.name) + } + }) + } + } + catch (e) { + logger.error(buildFileOperationDiagnostic({ + code: 'WORKSPACE_DIRECTORY_SCAN_FAILED', + title: 'Failed to scan workspace directory', + operation: 'scan', + targetKind: 'workspace directory', + path: workspaceDir, + error: e + })) + } + } + + const workspace: Workspace = { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir) + }, + projects: aindexProjects + } + + return {workspace} + } +} diff --git a/sdk/src/inputs/input-command.test.ts b/sdk/src/inputs/input-command.test.ts new file mode 100644 index 00000000..49957086 --- /dev/null +++ b/sdk/src/inputs/input-command.test.ts @@ -0,0 +1,148 @@ +import type {InputCapabilityContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import {CommandInputCapability} from './input-command' + +const legacySourceExtension = '.cn.mdx' + +function createContext(tempWorkspace: string): InputCapabilityContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger: createLogger('CommandInputCapabilityTest', 'error'), + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputCapabilityContext +} + +describe('command input plugin', () => { + it('prefers dist content, and dist also runs mdx-to-md compilation', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'commands') + const distDir = path.join(aindexDir, 'dist', 'commands') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + + const srcFile = path.join(srcDir, 'demo.src.mdx') + const distFile = path.join(distDir, 'demo.mdx') + const srcContent = '---\ndescription: src\n---\nCommand source' + const distContent = '---\ndescription: dist\n---\nexport const x = 1\n\nCommand dist' + fs.writeFileSync(srcFile, srcContent, 'utf8') + fs.writeFileSync(distFile, distContent, 'utf8') + + const plugin = new CommandInputCapability() + const result = await plugin.collect(createContext(tempWorkspace)) + const [command] = result.commands ?? [] + + expect(result.commands?.length ?? 0).toBe(1) + expect(command?.commandName).toBe('demo') + expect(command?.content).toContain('Command dist') + expect(command?.content).not.toContain('Command source') + expect(command?.content).not.toContain('export const x = 1') + expect(command?.yamlFrontMatter?.description).toBe('dist') + expect(command?.rawMdxContent).toContain('export const x = 1') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('loads commands from dist when the source tree is missing', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-dist-only-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const distDir = path.join(aindexDir, 'dist', 'commands') + + try { + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync( + path.join(distDir, 'demo.mdx'), + '---\ndescription: dist only\n---\nDist only command', + 'utf8' + ) + + const plugin = new CommandInputCapability() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.commands?.length ?? 0).toBe(1) + expect(result.commands?.[0]?.commandName).toBe('demo') + expect(result.commands?.[0]?.content).toContain('Dist only command') + expect(result.commands?.[0]?.yamlFrontMatter?.description).toBe('dist only') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('fails hard when source exists without a compiled dist pair', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-source-only-test-')) + const srcDir = path.join(tempWorkspace, 'aindex', 'commands') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.writeFileSync( + path.join(srcDir, 'demo.src.mdx'), + '---\ndescription: source only\n---\nSource only command', + 'utf8' + ) + + const plugin = new CommandInputCapability() + await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Missing compiled dist prompt') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('ignores legacy cn command sources', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-legacy-test-')) + const srcDir = path.join(tempWorkspace, 'aindex', 'commands') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.writeFileSync( + path.join(srcDir, `demo${legacySourceExtension}`), + '---\ndescription: legacy\n---\nLegacy command', + 'utf8' + ) + + const plugin = new CommandInputCapability() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.commands ?? []).toHaveLength(0) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('rejects workspace as an unsupported command scope', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-workspace-scope-test-')) + const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'commands') + + try { + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync( + path.join(distDir, 'demo.mdx'), + '---\nscope: workspace\n---\nDist only command', + 'utf8' + ) + + const plugin = new CommandInputCapability() + await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Field "scope" must be "project" or "global"') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/input-command.ts b/sdk/src/inputs/input-command.ts new file mode 100644 index 00000000..69026663 --- /dev/null +++ b/sdk/src/inputs/input-command.ts @@ -0,0 +1,152 @@ +import type { + CommandPrompt, + CommandYAMLFrontMatter, + InputCapabilityContext, + InputCollectedContext, + Locale +} from '../plugins/plugin-core' +import {buildFileOperationDiagnostic} from '@/diagnostics' +import { + AbstractInputCapability, + createLocalizedPromptReader, + FilePathKind, + PromptKind, + SourceLocaleExtensions, + validateCommandMetadata + +} from '../plugins/plugin-core' + +export class CommandInputCapability extends AbstractInputCapability { + constructor() { + super('CommandInputCapability') + } + + private createCommandPrompt( + content: string, + _locale: Locale, + name: string, + distDir: string, + ctx: InputCapabilityContext, + metadata?: Record + ): CommandPrompt { + const {path} = ctx + + const normalizedName = name.replaceAll('\\', '/') // Normalize Windows backslashes to forward slashes + const slashIndex = normalizedName.indexOf('/') + const parentDirName = slashIndex !== -1 ? normalizedName.slice(0, slashIndex) : void 0 + const fileName = slashIndex !== -1 ? normalizedName.slice(slashIndex + 1) : normalizedName + + const baseName = fileName.replace(/\.mdx$/, '') + const underscoreIndex = baseName.indexOf('_') + const commandPrefix = parentDirName ?? (underscoreIndex === -1 ? void 0 : baseName.slice(0, Math.max(0, underscoreIndex))) + const commandName = parentDirName != null || underscoreIndex === -1 + ? baseName + : baseName.slice(Math.max(0, underscoreIndex + 1)) + + const filePath = path.join(distDir, `${name}.mdx`) + const entryName = `${name}.mdx` + const yamlFrontMatter = metadata as CommandYAMLFrontMatter | undefined + + const prompt: CommandPrompt = { + type: PromptKind.Command, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: entryName, + basePath: distDir, + getDirectoryName: () => entryName.replace(/\.mdx$/, ''), + getAbsolutePath: () => filePath + }, + ...commandPrefix != null && {commandPrefix}, + commandName + } as CommandPrompt + + if (yamlFrontMatter == null) return prompt + + const validation = validateCommandMetadata(yamlFrontMatter as Record, filePath) + if (!validation.valid) throw new Error(validation.errors.join('\n')) + + Object.assign(prompt, {yamlFrontMatter}) + if (yamlFrontMatter.seriName != null) Object.assign(prompt, {seriName: yamlFrontMatter.seriName}) + if (yamlFrontMatter.scope === 'global') Object.assign(prompt, {globalOnly: true}) + return prompt + } + + override async collect(ctx: InputCapabilityContext): Promise> { + const {userConfigOptions: options, logger, path, fs, globalScope} = ctx + const resolvedPaths = this.resolveBasePaths(options) + + const srcDir = this.resolveAindexPath(options.aindex.commands.src, resolvedPaths.aindexDir) + const distDir = this.resolveAindexPath(options.aindex.commands.dist, resolvedPaths.aindexDir) + + logger.debug('CommandInputCapability collecting', { + srcDir, + distDir, + aindexDir: resolvedPaths.aindexDir + }) + + const reader = createLocalizedPromptReader(fs, path, logger, globalScope) + + const {prompts: localizedCommands, errors} = await reader.readFlatFiles( + srcDir, + distDir, + { + kind: PromptKind.Command, + localeExtensions: SourceLocaleExtensions, + hydrateSourceContents: false, + isDirectoryStructure: false, + createPrompt: (content, locale, name, metadata) => this.createCommandPrompt( + content, + locale, + name, + distDir, + ctx, + metadata + ) + } + ) + + logger.debug('CommandInputCapability read complete', { + commandCount: localizedCommands.length, + errorCount: errors.length + }) + + for (const error of errors) { + logger.warn(buildFileOperationDiagnostic({ + code: 'COMMAND_PROMPT_READ_FAILED', + title: 'Failed to read command prompt', + operation: error.phase === 'scan' ? 'scan' : 'read', + targetKind: 'command prompt', + path: error.path, + error: error.error, + details: { + phase: error.phase + } + })) + } + + if (errors.length > 0) throw new Error(errors.map(error => error.error instanceof Error ? error.error.message : String(error.error)).join('\n')) + + const flatCommands: CommandPrompt[] = [] + for (const localized of localizedCommands) { + const distContent = localized.dist + if (distContent?.prompt == null) continue + + const {prompt: distPrompt, rawMdx} = distContent + flatCommands.push(rawMdx == null + ? distPrompt + : {...distPrompt, rawMdxContent: rawMdx}) + } + + logger.debug('CommandInputCapability flattened commands', { + count: flatCommands.length, + commands: flatCommands.map(c => c.commandName) + }) + + return { + commands: flatCommands + } + } +} diff --git a/sdk/src/inputs/input-editorconfig.ts b/sdk/src/inputs/input-editorconfig.ts new file mode 100644 index 00000000..aaeda25f --- /dev/null +++ b/sdk/src/inputs/input-editorconfig.ts @@ -0,0 +1,23 @@ +import type {InputCapabilityContext, InputCollectedContext, ProjectIDEConfigFile} from '../plugins/plugin-core' +import {AbstractInputCapability, IDEKind} from '../plugins/plugin-core' +import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' + +export class EditorConfigInputCapability extends AbstractInputCapability { + constructor() { + super('EditorConfigInputCapability') + } + + collect(ctx: InputCapabilityContext): Partial { + const {userConfigOptions, fs} = ctx + const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) + + const editorConfigFiles: ProjectIDEConfigFile[] = [] + const file = readPublicIdeConfigDefinitionFile(IDEKind.EditorConfig, '.editorconfig', aindexDir, fs, { + command: ctx.runtimeCommand, + workspaceDir + }) + if (file != null) editorConfigFiles.push(file) + + return {editorConfigFiles} + } +} diff --git a/sdk/src/inputs/input-git-exclude.ts b/sdk/src/inputs/input-git-exclude.ts new file mode 100644 index 00000000..388aec02 --- /dev/null +++ b/sdk/src/inputs/input-git-exclude.ts @@ -0,0 +1,32 @@ +import type {InputCapabilityContext, InputCollectedContext} from '../plugins/plugin-core' +import {AbstractInputCapability} from '../plugins/plugin-core' +import {PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, resolvePublicDefinitionPath} from '../public-config-paths' + +export class GitExcludeInputCapability extends AbstractInputCapability { + constructor() { + super('GitExcludeInputCapability') + } + + collect(ctx: InputCapabilityContext): Partial { + const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) + const filePath = resolvePublicDefinitionPath(aindexDir, PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, { + command: ctx.runtimeCommand, + workspaceDir + }) + + if (!ctx.fs.existsSync(filePath)) { + this.log.debug({action: 'collect', message: 'File not found', path: filePath}) + return {} + } + + const content = ctx.fs.readFileSync(filePath, 'utf8') + + if (content.length === 0) { + this.log.debug({action: 'collect', message: 'File is empty', path: filePath}) + return {} + } + + this.log.debug({action: 'collect', message: 'Loaded file content', path: filePath, length: content.length}) + return {shadowGitExclude: content} + } +} diff --git a/sdk/src/inputs/input-gitignore.ts b/sdk/src/inputs/input-gitignore.ts new file mode 100644 index 00000000..2dcd1cd3 --- /dev/null +++ b/sdk/src/inputs/input-gitignore.ts @@ -0,0 +1,32 @@ +import type {InputCapabilityContext, InputCollectedContext} from '../plugins/plugin-core' +import {AbstractInputCapability} from '../plugins/plugin-core' +import {PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, resolvePublicDefinitionPath} from '../public-config-paths' + +export class GitIgnoreInputCapability extends AbstractInputCapability { + constructor() { + super('GitIgnoreInputCapability') + } + + collect(ctx: InputCapabilityContext): Partial { + const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) + const filePath = resolvePublicDefinitionPath(aindexDir, PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, { + command: ctx.runtimeCommand, + workspaceDir + }) + + if (!ctx.fs.existsSync(filePath)) { + this.log.debug({action: 'collect', message: 'File not found', path: filePath}) + return {} + } + + const content = ctx.fs.readFileSync(filePath, 'utf8') + + if (content.length === 0) { + this.log.debug({action: 'collect', message: 'File is empty', path: filePath}) + return {} + } + + this.log.debug({action: 'collect', message: 'Loaded file content', path: filePath, length: content.length}) + return {globalGitIgnore: content} + } +} diff --git a/sdk/src/inputs/input-global-memory.ts b/sdk/src/inputs/input-global-memory.ts new file mode 100644 index 00000000..c23faf34 --- /dev/null +++ b/sdk/src/inputs/input-global-memory.ts @@ -0,0 +1,136 @@ +import type {InputCapabilityContext, InputCollectedContext} from '../plugins/plugin-core' + +import process from 'node:process' + +import {CompilerDiagnosticError, ScopeError} from '@truenine/md-compiler/errors' +import {getGlobalConfigPath} from '@/ConfigLoader' +import { + buildConfigDiagnostic, + buildPathStateDiagnostic, + buildPromptCompilerDiagnostic, + diagnosticLines +} from '@/diagnostics' +import {getEffectiveHomeDir} from '@/runtime-environment' +import {AbstractInputCapability, FilePathKind, GlobalConfigDirectoryType, PromptKind} from '../plugins/plugin-core' +import {assertNoResidualModuleSyntax} from '../plugins/plugin-core/DistPromptGuards' +import {readPromptArtifact} from '../plugins/plugin-core/PromptArtifactCache' +import {formatPromptCompilerDiagnostic} from '../plugins/plugin-core/PromptCompilerDiagnostics' + +export class GlobalMemoryInputCapability extends AbstractInputCapability { + constructor() { + super('GlobalMemoryInputCapability') + } + + async collect(ctx: InputCapabilityContext): Promise> { + const {userConfigOptions: options, fs, path, globalScope} = ctx + const {aindexDir} = this.resolveBasePaths(options) + const globalConfigPath = getGlobalConfigPath() + const effectiveHomeDir = getEffectiveHomeDir() + + const globalMemoryFile = this.resolveAindexPath(options.aindex.globalPrompt.dist, aindexDir) + + if (!fs.existsSync(globalMemoryFile)) { + this.log.warn(buildPathStateDiagnostic({ + code: 'GLOBAL_MEMORY_PROMPT_MISSING', + title: 'Global memory prompt is missing', + path: globalMemoryFile, + expectedKind: 'compiled global memory prompt file', + actualState: 'path does not exist' + })) + return {} + } + + if (!fs.statSync(globalMemoryFile).isFile()) { + this.log.warn(buildPathStateDiagnostic({ + code: 'GLOBAL_MEMORY_PROMPT_NOT_FILE', + title: 'Global memory prompt path is not a file', + path: globalMemoryFile, + expectedKind: 'compiled global memory prompt file', + actualState: 'path exists but is not a regular file' + })) + return {} + } + + let compiledContent: string, + artifact: Awaited> + try { + artifact = await readPromptArtifact(globalMemoryFile, { + mode: 'dist', + globalScope + }) + compiledContent = artifact.content + assertNoResidualModuleSyntax(compiledContent, globalMemoryFile) + } + catch (e) { + if (e instanceof CompilerDiagnosticError) { + this.log.error(buildPromptCompilerDiagnostic({ + code: 'GLOBAL_MEMORY_PROMPT_COMPILE_FAILED', + title: 'Failed to compile global memory prompt', + diagnosticText: formatPromptCompilerDiagnostic(e, { + operation: 'Failed to compile global memory prompt.', + promptKind: 'global-memory', + logicalName: 'global-memory', + distPath: globalMemoryFile + }), + details: { + promptKind: 'global-memory', + distPath: globalMemoryFile + } + })) + if (e instanceof ScopeError) { + this.log.error(buildConfigDiagnostic({ + code: 'GLOBAL_MEMORY_SCOPE_VARIABLES_MISSING', + title: 'Global memory prompt references missing config variables', + reason: diagnosticLines( + `The global memory prompt uses scope variables that are not defined in "${globalConfigPath}".` + ), + configPath: globalConfigPath, + exactFix: diagnosticLines( + `Add the missing variables to "${globalConfigPath}" and rerun tnmsc.` + ), + possibleFixes: [ + diagnosticLines('If you reference `{profile.name}`, define `profile.name` in the config file.') + ], + details: { + promptPath: globalMemoryFile, + errorMessage: e.message + } + })) + } + process.exit(1) + } + throw e + } + + this.log.debug({action: 'collect', path: globalMemoryFile, contentLength: compiledContent.length}) + + return { + globalMemory: { + type: PromptKind.GlobalMemory, + content: compiledContent, + length: compiledContent.length, + filePathKind: FilePathKind.Relative, + ...artifact.parsed.rawFrontMatter != null && {rawFrontMatter: artifact.parsed.rawFrontMatter}, + markdownAst: artifact.parsed.markdownAst, + markdownContents: artifact.parsed.markdownContents, + dir: { + pathKind: FilePathKind.Relative, + path: path.basename(globalMemoryFile), + basePath: path.dirname(globalMemoryFile), + getDirectoryName: () => path.basename(globalMemoryFile), + getAbsolutePath: () => globalMemoryFile + }, + parentDirectoryPath: { + type: GlobalConfigDirectoryType.UserHome, + directory: { + pathKind: FilePathKind.Relative, + path: '', + basePath: effectiveHomeDir, + getDirectoryName: () => path.basename(effectiveHomeDir), + getAbsolutePath: () => effectiveHomeDir + } + } + } + } + } +} diff --git a/sdk/src/inputs/input-jetbrains-config.ts b/sdk/src/inputs/input-jetbrains-config.ts new file mode 100644 index 00000000..cc8f5e88 --- /dev/null +++ b/sdk/src/inputs/input-jetbrains-config.ts @@ -0,0 +1,31 @@ +import type {InputCapabilityContext, InputCollectedContext, ProjectIDEConfigFile} from '../plugins/plugin-core' +import {AbstractInputCapability, IDEKind} from '../plugins/plugin-core' +import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' + +export class JetBrainsConfigInputCapability extends AbstractInputCapability { + constructor() { + super('JetBrainsConfigInputCapability') + } + + collect(ctx: InputCapabilityContext): Partial { + const {userConfigOptions, fs} = ctx + const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) + + const files = [ + '.idea/codeStyles/Project.xml', + '.idea/codeStyles/codeStyleConfig.xml', + '.idea/.gitignore' + ] + const jetbrainsConfigFiles: ProjectIDEConfigFile[] = [] + + for (const relativePath of files) { + const file = readPublicIdeConfigDefinitionFile(IDEKind.IntellijIDEA, relativePath, aindexDir, fs, { + command: ctx.runtimeCommand, + workspaceDir + }) + if (file != null) jetbrainsConfigFiles.push(file) + } + + return {jetbrainsConfigFiles} + } +} diff --git a/sdk/src/inputs/input-project-prompt.test.ts b/sdk/src/inputs/input-project-prompt.test.ts new file mode 100644 index 00000000..6a5dd8a0 --- /dev/null +++ b/sdk/src/inputs/input-project-prompt.test.ts @@ -0,0 +1,176 @@ +import type {InputCapabilityContext, Project, Workspace} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger, FilePathKind, WORKSPACE_ROOT_PROJECT_NAME} from '../plugins/plugin-core' +import {ProjectPromptInputCapability} from './input-project-prompt' + +function createProject( + tempWorkspace: string, + name: string, + overrides: Partial = {} +): Project { + return { + name, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: name, + basePath: tempWorkspace, + getDirectoryName: () => name, + getAbsolutePath: () => path.join(tempWorkspace, name) + }, + ...overrides + } +} + +function createWorkspace(tempWorkspace: string, projects: readonly Project[] = [createProject(tempWorkspace, 'project-a')]): Workspace { + return { + directory: { + pathKind: FilePathKind.Absolute, + path: tempWorkspace, + getDirectoryName: () => path.basename(tempWorkspace), + getAbsolutePath: () => tempWorkspace + }, + projects: [...projects] + } +} + +function createContext(tempWorkspace: string, workspace: Workspace = createWorkspace(tempWorkspace)): InputCapabilityContext { + return { + logger: createLogger('ProjectPromptInputCapabilityTest', 'error'), + fs, + path, + glob, + userConfigOptions: mergeConfig({workspaceDir: tempWorkspace}), + dependencyContext: { + workspace + } + } as InputCapabilityContext +} + +describe('project prompt input plugin workspace prompt support', () => { + it('injects a synthetic workspace project from aindex/dist/workspace.mdx only', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-workspace-prompt-')) + const workspacePromptPath = path.join(tempWorkspace, 'aindex', 'dist', 'workspace.mdx') + + try { + fs.mkdirSync(path.dirname(workspacePromptPath), {recursive: true}) + fs.writeFileSync(workspacePromptPath, '---\ndescription: workspace\n---\nWorkspace prompt body', 'utf8') + + const plugin = new ProjectPromptInputCapability() + const result = await plugin.collect(createContext(tempWorkspace)) + const projects = result.workspace?.projects ?? [] + const workspaceProject = projects.find(project => project.isWorkspaceRootProject === true) + + expect(workspaceProject).toBeDefined() + expect(workspaceProject?.name).toBe(WORKSPACE_ROOT_PROJECT_NAME) + expect(workspaceProject?.rootMemoryPrompt?.content).toContain('Workspace prompt body') + expect(workspaceProject?.childMemoryPrompts).toBeUndefined() + expect(workspaceProject?.isPromptSourceProject).not.toBe(true) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('does not fall back to workspace/dist/workspace.mdx when aindex dist prompt is missing', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-workspace-prompt-fallback-')) + const wrongPromptPath = path.join(tempWorkspace, 'dist', 'workspace.mdx') + + try { + fs.mkdirSync(path.dirname(wrongPromptPath), {recursive: true}) + fs.writeFileSync(wrongPromptPath, 'Workspace prompt from the wrong place', 'utf8') + + const plugin = new ProjectPromptInputCapability() + const result = await plugin.collect(createContext(tempWorkspace)) + const projects = result.workspace?.projects ?? [] + + expect(projects.some(project => project.isWorkspaceRootProject === true)).toBe(false) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('inherits the prompt source project config for the synthetic workspace project', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-workspace-prompt-config-')) + const workspacePromptPath = path.join(tempWorkspace, 'aindex', 'dist', 'workspace.mdx') + const promptSourceProjectConfig = { + includeSeries: ['prompt-source-series'], + subSeries: { + skills: ['ship-*'] + } + } + + try { + fs.mkdirSync(path.dirname(workspacePromptPath), {recursive: true}) + fs.writeFileSync(workspacePromptPath, 'Workspace prompt body', 'utf8') + + const workspace = createWorkspace(tempWorkspace, [ + createProject(tempWorkspace, 'project-a', { + projectConfig: { + includeSeries: ['fallback-series'] + } + }), + createProject(tempWorkspace, 'project-b', { + isPromptSourceProject: true, + projectConfig: promptSourceProjectConfig + }) + ]) + + const plugin = new ProjectPromptInputCapability() + const result = await plugin.collect(createContext(tempWorkspace, workspace)) + const workspaceProject = result.workspace?.projects?.find(project => project.isWorkspaceRootProject === true) + + expect(workspaceProject?.projectConfig).toEqual(promptSourceProjectConfig) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('loads ext, arch, and softwares project prompts using the same agt.mdx workflow as app', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-project-prompt-series-')) + const extRoot = path.join(tempWorkspace, 'aindex', 'dist', 'ext', 'plugin-a') + const archRoot = path.join(tempWorkspace, 'aindex', 'dist', 'arch', 'system-a') + const softwareRoot = path.join(tempWorkspace, 'aindex', 'dist', 'softwares', 'tool-a') + + try { + fs.mkdirSync(path.join(extRoot, 'docs'), {recursive: true}) + fs.mkdirSync(path.join(archRoot, 'design'), {recursive: true}) + fs.mkdirSync(path.join(softwareRoot, 'manual'), {recursive: true}) + fs.writeFileSync(path.join(extRoot, 'agt.mdx'), 'Ext root prompt', 'utf8') + fs.writeFileSync(path.join(extRoot, 'docs', 'agt.mdx'), 'Ext child prompt', 'utf8') + fs.writeFileSync(path.join(archRoot, 'agt.mdx'), 'Arch root prompt', 'utf8') + fs.writeFileSync(path.join(archRoot, 'design', 'agt.mdx'), 'Arch child prompt', 'utf8') + fs.writeFileSync(path.join(softwareRoot, 'agt.mdx'), 'Software root prompt', 'utf8') + fs.writeFileSync(path.join(softwareRoot, 'manual', 'agt.mdx'), 'Software child prompt', 'utf8') + + const workspace = createWorkspace(tempWorkspace, [ + createProject(tempWorkspace, 'plugin-a', {promptSeries: 'ext'}), + createProject(tempWorkspace, 'system-a', {promptSeries: 'arch'}), + createProject(tempWorkspace, 'tool-a', {promptSeries: 'softwares'}) + ]) + + const plugin = new ProjectPromptInputCapability() + const result = await plugin.collect(createContext(tempWorkspace, workspace)) + const projects = result.workspace?.projects ?? [] + const extProject = projects.find(project => project.name === 'plugin-a') + const archProject = projects.find(project => project.name === 'system-a') + const softwareProject = projects.find(project => project.name === 'tool-a') + + expect(extProject?.rootMemoryPrompt?.content).toContain('Ext root prompt') + expect(extProject?.childMemoryPrompts?.[0]?.content).toContain('Ext child prompt') + expect(archProject?.rootMemoryPrompt?.content).toContain('Arch root prompt') + expect(archProject?.childMemoryPrompts?.[0]?.content).toContain('Arch child prompt') + expect(softwareProject?.rootMemoryPrompt?.content).toContain('Software root prompt') + expect(softwareProject?.childMemoryPrompts?.[0]?.content).toContain('Software child prompt') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/input-project-prompt.ts b/sdk/src/inputs/input-project-prompt.ts new file mode 100644 index 00000000..e5039491 --- /dev/null +++ b/sdk/src/inputs/input-project-prompt.ts @@ -0,0 +1,435 @@ +import type { + InputCapabilityContext, + InputCollectedContext, + Project, + ProjectChildrenMemoryPrompt, + ProjectRootMemoryPrompt, + YAMLFrontMatter +} from '../plugins/plugin-core' + +import process from 'node:process' + +import {CompilerDiagnosticError, ScopeError} from '@truenine/md-compiler/errors' +import {resolveAindexProjectSeriesConfig, resolveAindexProjectSeriesConfigs} from '@/aindex-project-series' +import {getGlobalConfigPath} from '@/ConfigLoader' +import { + buildConfigDiagnostic, + buildFileOperationDiagnostic, + buildPromptCompilerDiagnostic, + diagnosticLines +} from '@/diagnostics' +import {AbstractInputCapability, FilePathKind, PromptKind, WORKSPACE_ROOT_PROJECT_NAME} from '../plugins/plugin-core' +import {assertNoResidualModuleSyntax} from '../plugins/plugin-core/DistPromptGuards' +import {readPromptArtifact} from '../plugins/plugin-core/PromptArtifactCache' +import {formatPromptCompilerDiagnostic} from '../plugins/plugin-core/PromptCompilerDiagnostics' + +const PROJECT_MEMORY_FILE = 'agt.mdx' +const SCAN_SKIP_DIRECTORIES: readonly string[] = ['node_modules', '.git'] as const + +export class ProjectPromptInputCapability extends AbstractInputCapability { + constructor() { + super('ProjectPromptInputCapability', ['AindexInputCapability']) + } + + async collect(ctx: InputCapabilityContext): Promise> { + const {dependencyContext, fs, userConfigOptions: options, path, globalScope} = ctx + const {aindexDir} = this.resolveBasePaths(options) + const workspacePromptPath = this.resolveAindexPath(options.aindex.workspacePrompt.dist, aindexDir) + + const dependencyWorkspace = dependencyContext.workspace + if (dependencyWorkspace == null) { + this.log.info('No workspace found in dependency context, skipping project prompt enhancement') + return {} + } + + const projects = dependencyWorkspace.projects ?? [] + + const enhancedProjects = await Promise.all(projects.map(async project => { + const projectName = project.name + if (projectName == null) return project + if (project.isWorkspaceRootProject === true) return project + + const seriesConfigs = project.promptSeries != null + ? [resolveAindexProjectSeriesConfig(options, project.promptSeries)] + : resolveAindexProjectSeriesConfigs(options) + const matchingSeries = seriesConfigs.find(series => { + const shadowProjectPath = path.join(aindexDir, series.dist, projectName) + return fs.existsSync(shadowProjectPath) && fs.statSync(shadowProjectPath).isDirectory() + }) + if (matchingSeries == null) return project + + const shadowProjectPath = path.join(aindexDir, matchingSeries.dist, projectName) + + const targetProjectPath = project.dirFromWorkspacePath?.getAbsolutePath() + + const rootMemoryPrompt = await this.readRootMemoryPrompt(ctx, shadowProjectPath, globalScope) + const childMemoryPrompts = targetProjectPath != null + ? await this.scanChildMemoryPrompts(ctx, shadowProjectPath, targetProjectPath, globalScope) + : [] + + return { + ...project, + ...project.promptSeries == null ? {promptSeries: matchingSeries.name} : {}, + ...rootMemoryPrompt != null && {rootMemoryPrompt}, + ...childMemoryPrompts.length > 0 && {childMemoryPrompts} + } + })) + + const workspaceRootProject = await this.readWorkspaceRootProjectPrompt( + ctx, + workspacePromptPath, + globalScope, + this.resolveWorkspaceRootProjectConfig(projects) + ) + + return { + workspace: { + directory: dependencyWorkspace.directory, + projects: workspaceRootProject == null + ? enhancedProjects + : [...enhancedProjects, workspaceRootProject] + } + } + } + + private async readWorkspaceRootProjectPrompt( + ctx: InputCapabilityContext, + filePath: string, + globalScope: InputCapabilityContext['globalScope'], + projectConfig: Project['projectConfig'] + ): Promise { + const {fs, logger} = ctx + + if (!fs.existsSync(filePath) || !fs.statSync(filePath).isFile()) return + + try { + let artifact: Awaited> + try { + artifact = await readPromptArtifact(filePath, { + mode: 'dist', + globalScope + }) + assertNoResidualModuleSyntax(artifact.content, filePath) + } + catch (e) { + if (e instanceof CompilerDiagnosticError) { + logger.error(buildPromptCompilerDiagnostic({ + code: 'WORKSPACE_ROOT_MEMORY_PROMPT_COMPILE_FAILED', + title: 'Failed to compile workspace root memory prompt', + diagnosticText: formatPromptCompilerDiagnostic(e, { + operation: 'Failed to compile workspace root memory prompt.', + promptKind: 'workspace-root-memory', + logicalName: filePath, + distPath: filePath + }), + details: { + promptKind: 'workspace-root-memory', + distPath: filePath + } + })) + if (e instanceof ScopeError) { + const globalConfigPath = getGlobalConfigPath() + logger.error(buildConfigDiagnostic({ + code: 'WORKSPACE_ROOT_MEMORY_SCOPE_VARIABLES_MISSING', + title: 'Workspace root memory prompt references missing config variables', + reason: diagnosticLines( + `The workspace root memory prompt uses scope variables that are not defined in "${globalConfigPath}".` + ), + configPath: globalConfigPath, + exactFix: diagnosticLines( + `Define the missing variables in "${globalConfigPath}" and rerun tnmsc.` + ), + details: { + promptPath: filePath, + errorMessage: e.message + } + })) + } + process.exit(1) + } + throw e + } + + const rootMemoryPrompt: ProjectRootMemoryPrompt = { + type: PromptKind.ProjectRootMemory, + content: artifact.content, + length: artifact.content.length, + filePathKind: FilePathKind.Relative, + ...artifact.parsed.yamlFrontMatter != null && {yamlFrontMatter: artifact.parsed.yamlFrontMatter as YAMLFrontMatter}, + ...artifact.parsed.rawFrontMatter != null && {rawFrontMatter: artifact.parsed.rawFrontMatter}, + markdownAst: artifact.parsed.markdownAst, + markdownContents: artifact.parsed.markdownContents, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + } + } + + return { + name: WORKSPACE_ROOT_PROJECT_NAME, + isWorkspaceRootProject: true, + ...projectConfig != null && {projectConfig}, + rootMemoryPrompt + } + } + catch (e) { + logger.error(buildFileOperationDiagnostic({ + code: 'WORKSPACE_ROOT_MEMORY_PROMPT_READ_FAILED', + title: 'Failed to read workspace root memory prompt', + operation: 'read', + targetKind: 'workspace root memory prompt', + path: filePath, + error: e + })) + return void 0 + } + } + + private resolveWorkspaceRootProjectConfig(projects: readonly Project[]): Project['projectConfig'] { + const concreteProjects = projects.filter(project => project.isWorkspaceRootProject !== true) + const promptSourceProject = concreteProjects.find(project => project.isPromptSourceProject === true) + return promptSourceProject?.projectConfig ?? concreteProjects[0]?.projectConfig + } + + private async readRootMemoryPrompt( + ctx: InputCapabilityContext, + projectPath: string, + globalScope: InputCapabilityContext['globalScope'] + ): Promise { + const {fs, path, logger} = ctx + const filePath = path.join(projectPath, PROJECT_MEMORY_FILE) + + if (!fs.existsSync(filePath) || !fs.statSync(filePath).isFile()) return + + try { + let artifact: Awaited> + try { + artifact = await readPromptArtifact(filePath, { + mode: 'dist', + globalScope + }) + assertNoResidualModuleSyntax(artifact.content, filePath) + } + catch (e) { + if (e instanceof CompilerDiagnosticError) { + logger.error(buildPromptCompilerDiagnostic({ + code: 'PROJECT_ROOT_MEMORY_PROMPT_COMPILE_FAILED', + title: 'Failed to compile project root memory prompt', + diagnosticText: formatPromptCompilerDiagnostic(e, { + operation: 'Failed to compile project root memory prompt.', + promptKind: 'project-root-memory', + logicalName: filePath, + distPath: filePath + }), + details: { + promptKind: 'project-root-memory', + distPath: filePath + } + })) + if (e instanceof ScopeError) { + const globalConfigPath = getGlobalConfigPath() + logger.error(buildConfigDiagnostic({ + code: 'PROJECT_ROOT_MEMORY_SCOPE_VARIABLES_MISSING', + title: 'Project root memory prompt references missing config variables', + reason: diagnosticLines( + `The project root memory prompt uses scope variables that are not defined in "${globalConfigPath}".` + ), + configPath: globalConfigPath, + exactFix: diagnosticLines( + `Define the missing variables in "${globalConfigPath}" and rerun tnmsc.` + ), + details: { + promptPath: filePath, + errorMessage: e.message + } + })) + } + process.exit(1) + } + throw e + } + + return { + type: PromptKind.ProjectRootMemory, + content: artifact.content, + length: artifact.content.length, + filePathKind: FilePathKind.Relative, + ...artifact.parsed.yamlFrontMatter != null && {yamlFrontMatter: artifact.parsed.yamlFrontMatter as YAMLFrontMatter}, + ...artifact.parsed.rawFrontMatter != null && {rawFrontMatter: artifact.parsed.rawFrontMatter}, + markdownAst: artifact.parsed.markdownAst, + markdownContents: artifact.parsed.markdownContents, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + } + } + } + catch (e) { + logger.error(buildFileOperationDiagnostic({ + code: 'PROJECT_ROOT_MEMORY_PROMPT_READ_FAILED', + title: 'Failed to read project root memory prompt', + operation: 'read', + targetKind: 'project root memory prompt', + path: filePath, + error: e + })) + return void 0 + } + } + + private async scanChildMemoryPrompts( + ctx: InputCapabilityContext, + shadowProjectPath: string, + targetProjectPath: string, + globalScope: InputCapabilityContext['globalScope'] + ): Promise { + const {logger} = ctx + const prompts: ProjectChildrenMemoryPrompt[] = [] + + try { + await this.scanDirectoryRecursive(ctx, shadowProjectPath, shadowProjectPath, targetProjectPath, prompts, globalScope) + } + catch (e) { + logger.error(buildFileOperationDiagnostic({ + code: 'PROJECT_CHILD_MEMORY_SCAN_FAILED', + title: 'Failed to scan project child memory prompts', + operation: 'scan', + targetKind: 'project child memory prompt directory', + path: shadowProjectPath, + error: e + })) + } + + return prompts + } + + private async scanDirectoryRecursive( + ctx: InputCapabilityContext, + shadowProjectPath: string, + currentPath: string, + targetProjectPath: string, + prompts: ProjectChildrenMemoryPrompt[], + globalScope: InputCapabilityContext['globalScope'] + ): Promise { + const {fs, path} = ctx + + const entries = fs.readdirSync(currentPath, {withFileTypes: true}) + for (const entry of entries) { + if (!entry.isDirectory()) continue + + if (SCAN_SKIP_DIRECTORIES.includes(entry.name)) continue + + const childDir = path.join(currentPath, entry.name) + const memoryFile = path.join(childDir, PROJECT_MEMORY_FILE) + + if (Boolean(fs.existsSync(memoryFile)) && Boolean(fs.statSync(memoryFile).isFile())) { + const prompt = await this.readChildMemoryPrompt(ctx, shadowProjectPath, childDir, targetProjectPath, globalScope) + if (prompt != null) prompts.push(prompt) + } + + await this.scanDirectoryRecursive(ctx, shadowProjectPath, childDir, targetProjectPath, prompts, globalScope) + } + } + + private async readChildMemoryPrompt( + ctx: InputCapabilityContext, + shadowProjectPath: string, + shadowChildDir: string, + targetProjectPath: string, + globalScope: InputCapabilityContext['globalScope'] + ): Promise { + const {path, logger} = ctx + const filePath = path.join(shadowChildDir, PROJECT_MEMORY_FILE) + + try { + let artifact: Awaited> + try { + artifact = await readPromptArtifact(filePath, { + mode: 'dist', + globalScope + }) + assertNoResidualModuleSyntax(artifact.content, filePath) + } + catch (e) { + if (e instanceof CompilerDiagnosticError) { + logger.error(buildPromptCompilerDiagnostic({ + code: 'PROJECT_CHILD_MEMORY_PROMPT_COMPILE_FAILED', + title: 'Failed to compile project child memory prompt', + diagnosticText: formatPromptCompilerDiagnostic(e, { + operation: 'Failed to compile project child memory prompt.', + promptKind: 'project-child-memory', + logicalName: filePath, + distPath: filePath + }), + details: { + promptKind: 'project-child-memory', + distPath: filePath + } + })) + if (e instanceof ScopeError) { + const globalConfigPath = getGlobalConfigPath() + logger.error(buildConfigDiagnostic({ + code: 'PROJECT_CHILD_MEMORY_SCOPE_VARIABLES_MISSING', + title: 'Project child memory prompt references missing config variables', + reason: diagnosticLines( + `The project child memory prompt uses scope variables that are not defined in "${globalConfigPath}".` + ), + configPath: globalConfigPath, + exactFix: diagnosticLines( + `Define the missing variables in "${globalConfigPath}" and rerun tnmsc.` + ), + details: { + promptPath: filePath, + errorMessage: e.message + } + })) + } + process.exit(1) + } + throw e + } + + const relativePath = path.relative(shadowProjectPath, shadowChildDir) + const targetChildDir = path.join(targetProjectPath, relativePath) + const dirName = path.basename(shadowChildDir) + + return { + type: PromptKind.ProjectChildrenMemory, + content: artifact.content, + length: artifact.content.length, + filePathKind: FilePathKind.Relative, + ...artifact.parsed.yamlFrontMatter != null && {yamlFrontMatter: artifact.parsed.yamlFrontMatter as YAMLFrontMatter}, + ...artifact.parsed.rawFrontMatter != null && {rawFrontMatter: artifact.parsed.rawFrontMatter}, + markdownAst: artifact.parsed.markdownAst, + markdownContents: artifact.parsed.markdownContents, + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: targetProjectPath, + getDirectoryName: () => dirName, + getAbsolutePath: () => targetChildDir + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: targetProjectPath, + getDirectoryName: () => dirName, + getAbsolutePath: () => targetChildDir + } + } + } + catch (e) { + logger.error(buildFileOperationDiagnostic({ + code: 'PROJECT_CHILD_MEMORY_PROMPT_READ_FAILED', + title: 'Failed to read project child memory prompt', + operation: 'read', + targetKind: 'project child memory prompt', + path: filePath, + error: e + })) + return void 0 + } + } +} diff --git a/sdk/src/inputs/input-public-config.test.ts b/sdk/src/inputs/input-public-config.test.ts new file mode 100644 index 00000000..08c658eb --- /dev/null +++ b/sdk/src/inputs/input-public-config.test.ts @@ -0,0 +1,450 @@ +import type {InputCapabilityContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import { + AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS, + PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, + PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, + resolvePublicDefinitionPath +} from '../public-config-paths' +import {EditorConfigInputCapability} from './input-editorconfig' +import {GitExcludeInputCapability} from './input-git-exclude' +import {GitIgnoreInputCapability} from './input-gitignore' +import {JetBrainsConfigInputCapability} from './input-jetbrains-config' +import {AIAgentIgnoreInputCapability} from './input-shared-ignore' +import {VSCodeConfigInputCapability} from './input-vscode-config' +import {ZedConfigInputCapability} from './input-zed-config' + +interface TestContextOptions { + readonly aindexDir?: string + readonly runtimeCommand?: InputCapabilityContext['runtimeCommand'] +} + +function createContext( + tempWorkspace: string, + options?: TestContextOptions +): InputCapabilityContext { + const mergedOptions = mergeConfig({ + workspaceDir: tempWorkspace, + ...(options?.aindexDir != null + ? { + aindex: { + dir: options.aindexDir + } + } + : {}) + }) + + return { + logger: createLogger('PublicConfigInputCapabilityTest', 'error'), + fs, + path, + glob, + userConfigOptions: mergedOptions, + dependencyContext: {}, + ...(options?.runtimeCommand != null + ? {runtimeCommand: options.runtimeCommand} + : {}) + } as InputCapabilityContext +} + +function writePublicDefinition( + tempWorkspace: string, + targetRelativePath: string, + content: string +): string { + const filePath = resolvePublicDefinitionPath( + path.join(tempWorkspace, 'aindex'), + targetRelativePath + ) + fs.mkdirSync(path.dirname(filePath), {recursive: true}) + fs.writeFileSync(filePath, content, 'utf8') + return filePath +} + +function writePublicProxy(tempWorkspace: string, source: string): string { + return writePublicDefinition(tempWorkspace, 'proxy.ts', source) +} + +describe('public config input plugins', () => { + it('reads config definitions from target-relative public paths', () => { + const tempWorkspace = fs.mkdtempSync( + path.join(os.tmpdir(), 'tnmsc-public-config-input-') + ) + + try { + const aindexDir = path.join(tempWorkspace, 'aindex') + const gitIgnorePath = writePublicDefinition( + tempWorkspace, + PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, + 'dist/\n' + ) + const gitExcludePath = writePublicDefinition( + tempWorkspace, + PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, + '.idea/\n' + ) + const editorConfigPath = writePublicDefinition( + tempWorkspace, + '.editorconfig', + 'root = true\n' + ) + writePublicDefinition( + tempWorkspace, + '.vscode/settings.json', + '{"editor.tabSize": 2}\n' + ) + writePublicDefinition( + tempWorkspace, + '.vscode/extensions.json', + '{"recommendations":["foo.bar"]}\n' + ) + writePublicDefinition( + tempWorkspace, + '.zed/settings.json', + '{"tab_size": 2}\n' + ) + writePublicDefinition( + tempWorkspace, + '.idea/.gitignore', + '/workspace.xml\n' + ) + writePublicDefinition( + tempWorkspace, + '.idea/codeStyles/Project.xml', + '\n' + ) + writePublicDefinition( + tempWorkspace, + '.idea/codeStyles/codeStyleConfig.xml', + '\n' + ) + + for (const fileName of AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS) + { writePublicDefinition(tempWorkspace, fileName, `${fileName}\n`) } + + const ctx = createContext(tempWorkspace) + const gitIgnore = new GitIgnoreInputCapability().collect(ctx) + const gitExclude = new GitExcludeInputCapability().collect(ctx) + const editorConfig = new EditorConfigInputCapability().collect(ctx) + const vscode = new VSCodeConfigInputCapability().collect(ctx) + const zed = new ZedConfigInputCapability().collect(ctx) + const jetbrains = new JetBrainsConfigInputCapability().collect(ctx) + const ignoreFiles = new AIAgentIgnoreInputCapability().collect(ctx) + + expect(gitIgnore.globalGitIgnore).toBe('dist/\n') + expect(gitExclude.shadowGitExclude).toBe('.idea/\n') + expect(editorConfig.editorConfigFiles?.[0]?.dir.path).toBe( + editorConfigPath + ) + expect(vscode.vscodeConfigFiles?.map(file => file.dir.path)).toEqual([ + path.join(aindexDir, 'public', '.vscode', 'settings.json'), + path.join(aindexDir, 'public', '.vscode', 'extensions.json') + ]) + expect(zed.zedConfigFiles?.map(file => file.dir.path)).toEqual([ + path.join(aindexDir, 'public', '.zed', 'settings.json') + ]) + expect( + jetbrains.jetbrainsConfigFiles?.map(file => file.dir.path) + ).toEqual([ + path.join(aindexDir, 'public', '.idea', 'codeStyles', 'Project.xml'), + path.join( + aindexDir, + 'public', + '.idea', + 'codeStyles', + 'codeStyleConfig.xml' + ), + path.join(aindexDir, 'public', '.idea', '.gitignore') + ]) + expect( + ignoreFiles.aiAgentIgnoreConfigFiles?.map(file => file.fileName) + ).toEqual([...AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS]) + expect( + ignoreFiles.aiAgentIgnoreConfigFiles?.map(file => file.sourcePath) + ).toEqual( + AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS.map(fileName => + resolvePublicDefinitionPath(aindexDir, fileName)) + ) + expect(gitIgnorePath).toBe(path.join(aindexDir, 'public', '.gitignore')) + expect(gitExcludePath).toBe( + path.join(aindexDir, 'public', '.git', 'info', 'exclude') + ) + } finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('does not read legacy definition locations after the hard cut', () => { + const tempWorkspace = fs.mkdtempSync( + path.join(os.tmpdir(), 'tnmsc-public-config-legacy-') + ) + + try { + const aindexDir = path.join(tempWorkspace, 'aindex') + fs.mkdirSync(path.join(aindexDir, 'public'), {recursive: true}) + fs.mkdirSync(path.join(aindexDir, '.vscode'), {recursive: true}) + fs.mkdirSync(path.join(aindexDir, '.zed'), {recursive: true}) + fs.mkdirSync(path.join(aindexDir, '.idea', 'codeStyles'), { + recursive: true + }) + + fs.writeFileSync( + path.join(aindexDir, 'public', 'gitignore'), + 'legacy gitignore\n', + 'utf8' + ) + fs.writeFileSync( + path.join(aindexDir, 'public', 'exclude'), + 'legacy exclude\n', + 'utf8' + ) + fs.writeFileSync( + path.join(aindexDir, '.editorconfig'), + 'root = true\n', + 'utf8' + ) + fs.writeFileSync( + path.join(aindexDir, '.vscode', 'settings.json'), + '{}\n', + 'utf8' + ) + fs.writeFileSync( + path.join(aindexDir, '.vscode', 'extensions.json'), + '{}\n', + 'utf8' + ) + fs.writeFileSync( + path.join(aindexDir, '.zed', 'settings.json'), + '{}\n', + 'utf8' + ) + fs.writeFileSync( + path.join(aindexDir, '.idea', '.gitignore'), + '/workspace.xml\n', + 'utf8' + ) + fs.writeFileSync( + path.join(aindexDir, '.idea', 'codeStyles', 'Project.xml'), + '\n', + 'utf8' + ) + fs.writeFileSync( + path.join(aindexDir, '.idea', 'codeStyles', 'codeStyleConfig.xml'), + '\n', + 'utf8' + ) + fs.writeFileSync( + path.join(aindexDir, '.cursorignore'), + '.cursor/\n', + 'utf8' + ) + + const ctx = createContext(tempWorkspace) + + expect( + new GitIgnoreInputCapability().collect(ctx).globalGitIgnore + ).toBeUndefined() + expect( + new GitExcludeInputCapability().collect(ctx).shadowGitExclude + ).toBeUndefined() + expect( + new EditorConfigInputCapability().collect(ctx).editorConfigFiles ?? [] + ).toHaveLength(0) + expect( + new VSCodeConfigInputCapability().collect(ctx).vscodeConfigFiles ?? [] + ).toHaveLength(0) + expect( + new ZedConfigInputCapability().collect(ctx).zedConfigFiles ?? [] + ).toHaveLength(0) + expect( + new JetBrainsConfigInputCapability().collect(ctx).jetbrainsConfigFiles ?? [] + ).toHaveLength(0) + expect( + new AIAgentIgnoreInputCapability().collect(ctx).aiAgentIgnoreConfigFiles ?? [] + ).toHaveLength(0) + } finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('routes public definitions through public/proxy.ts transparently', () => { + const tempWorkspace = fs.mkdtempSync( + path.join(os.tmpdir(), 'tnmsc-public-config-proxy-') + ) + + try { + const aindexDir = path.join(tempWorkspace, 'aindex') + writePublicProxy( + tempWorkspace, + [ + 'export default (logicalPath) => {', + ' const normalizedPath = logicalPath.replaceAll("\\\\", "/")', + ' if (normalizedPath.startsWith(".git/")) return normalizedPath.replace(/^\\.git\\//, "____.git/")', + ' if (normalizedPath === ".idea/.gitignore") return ".idea/.gitignore"', + ' if (normalizedPath.startsWith(".idea/")) return normalizedPath', + ' if (!normalizedPath.startsWith(".")) return normalizedPath', + ' return normalizedPath.replace(/^\\.([^/\\\\]+)/, "____$1")', + '}', + '' + ].join('\n') + ) + + const gitIgnorePath = writePublicDefinition( + tempWorkspace, + PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, + 'dist/\n' + ) + const gitExcludePath = writePublicDefinition( + tempWorkspace, + PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, + '.idea/\n' + ) + const editorConfigPath = writePublicDefinition( + tempWorkspace, + '.editorconfig', + 'root = true\n' + ) + const vscodeSettingsPath = writePublicDefinition( + tempWorkspace, + '.vscode/settings.json', + '{"editor.tabSize": 2}\n' + ) + const vscodeExtensionsPath = writePublicDefinition( + tempWorkspace, + '.vscode/extensions.json', + '{"recommendations":["foo.bar"]}\n' + ) + const zedSettingsPath = writePublicDefinition( + tempWorkspace, + '.zed/settings.json', + '{"tab_size": 2}\n' + ) + const ideaGitIgnorePath = writePublicDefinition( + tempWorkspace, + '.idea/.gitignore', + '/workspace.xml\n' + ) + const ideaProjectPath = writePublicDefinition( + tempWorkspace, + '.idea/codeStyles/Project.xml', + '\n' + ) + const ideaCodeStyleConfigPath = writePublicDefinition( + tempWorkspace, + '.idea/codeStyles/codeStyleConfig.xml', + '\n' + ) + + for (const fileName of AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS) + { writePublicDefinition(tempWorkspace, fileName, `${fileName}\n`) } + + const ctx = createContext(tempWorkspace) + const gitIgnore = new GitIgnoreInputCapability().collect(ctx) + const gitExclude = new GitExcludeInputCapability().collect(ctx) + const editorConfig = new EditorConfigInputCapability().collect(ctx) + const vscode = new VSCodeConfigInputCapability().collect(ctx) + const zed = new ZedConfigInputCapability().collect(ctx) + const jetbrains = new JetBrainsConfigInputCapability().collect(ctx) + const ignoreFiles = new AIAgentIgnoreInputCapability().collect(ctx) + + expect(gitIgnore.globalGitIgnore).toBe('dist/\n') + expect(gitExclude.shadowGitExclude).toBe('.idea/\n') + expect(editorConfig.editorConfigFiles?.[0]?.dir.path).toBe( + editorConfigPath + ) + expect(vscode.vscodeConfigFiles?.map(file => file.dir.path)).toEqual([ + vscodeSettingsPath, + vscodeExtensionsPath + ]) + expect(zed.zedConfigFiles?.map(file => file.dir.path)).toEqual([ + zedSettingsPath + ]) + expect( + jetbrains.jetbrainsConfigFiles?.map(file => file.dir.path) + ).toEqual([ideaProjectPath, ideaCodeStyleConfigPath, ideaGitIgnorePath]) + expect( + ignoreFiles.aiAgentIgnoreConfigFiles?.map(file => file.sourcePath) + ).toEqual( + AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS.map(fileName => + resolvePublicDefinitionPath(aindexDir, fileName)) + ) + expect(gitIgnorePath).toBe( + path.join(aindexDir, 'public', '____gitignore') + ) + expect(gitExcludePath).toBe( + path.join(aindexDir, 'public', '____.git', 'info', 'exclude') + ) + expect(editorConfigPath).toBe( + path.join(aindexDir, 'public', '____editorconfig') + ) + expect(vscodeSettingsPath).toBe( + path.join(aindexDir, 'public', '____vscode', 'settings.json') + ) + expect(vscodeExtensionsPath).toBe( + path.join(aindexDir, 'public', '____vscode', 'extensions.json') + ) + expect(zedSettingsPath).toBe( + path.join(aindexDir, 'public', '____zed', 'settings.json') + ) + expect(ideaGitIgnorePath).toBe( + path.join(aindexDir, 'public', '.idea', '.gitignore') + ) + expect(ideaProjectPath).toBe( + path.join(aindexDir, 'public', '.idea', 'codeStyles', 'Project.xml') + ) + expect(ideaCodeStyleConfigPath).toBe( + path.join( + aindexDir, + 'public', + '.idea', + 'codeStyles', + 'codeStyleConfig.xml' + ) + ) + } finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('passes the configured workspace root into public/proxy.ts', () => { + const tempWorkspace = fs.mkdtempSync( + path.join(os.tmpdir(), 'tnmsc-public-config-nested-aindex-') + ) + + try { + const aindexDir = path.join(tempWorkspace, 'config', 'aindex') + const publicDir = path.join(aindexDir, 'public') + fs.mkdirSync(path.join(publicDir, 'expected'), {recursive: true}) + fs.writeFileSync( + path.join(publicDir, 'proxy.ts'), + [ + 'export default (_logicalPath, ctx) => {', + ` return ctx.workspaceDir === ${JSON.stringify(tempWorkspace)} && ctx.cwd === ${JSON.stringify(tempWorkspace)}`, + ' ? "expected/.gitignore"', + ' : "unexpected/.gitignore"', + '}', + '' + ].join('\n'), + 'utf8' + ) + fs.writeFileSync( + path.join(publicDir, 'expected', '.gitignore'), + 'dist/\n', + 'utf8' + ) + + const ctx = createContext(tempWorkspace, {aindexDir: 'config/aindex'}) + const gitIgnore = new GitIgnoreInputCapability().collect(ctx) + + expect(gitIgnore.globalGitIgnore).toBe('dist/\n') + } finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/input-readme.test.ts b/sdk/src/inputs/input-readme.test.ts new file mode 100644 index 00000000..9b4eec89 --- /dev/null +++ b/sdk/src/inputs/input-readme.test.ts @@ -0,0 +1,49 @@ +import type {InputCapabilityContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it, vi} from 'vitest' +import {mergeConfig} from '../config' +import {ReadmeMdInputCapability} from './input-readme' + +function createContext(tempWorkspace: string, logger: InputCapabilityContext['logger']): InputCapabilityContext { + return { + logger, + fs, + path, + glob, + userConfigOptions: mergeConfig({workspaceDir: tempWorkspace}), + dependencyContext: {} + } as InputCapabilityContext +} + +describe('readme input capability project series validation', () => { + it('fails fast when app, ext, arch, and softwares reuse the same project name', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-readme-series-conflict-')) + const error = vi.fn() + const logger = { + error, + warn: vi.fn(), + info: vi.fn(), + debug: vi.fn(), + trace: vi.fn(), + fatal: vi.fn() + } as InputCapabilityContext['logger'] + + try { + fs.mkdirSync(path.join(tempWorkspace, 'aindex', 'app', 'project-a'), {recursive: true}) + fs.mkdirSync(path.join(tempWorkspace, 'aindex', 'softwares', 'project-a'), {recursive: true}) + + await expect(new ReadmeMdInputCapability().collect(createContext(tempWorkspace, logger))) + .rejects + .toThrow('Readme project series name conflict') + expect(error).toHaveBeenCalledWith(expect.objectContaining({ + code: 'README_PROJECT_SERIES_NAME_CONFLICT' + })) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/input-readme.ts b/sdk/src/inputs/input-readme.ts new file mode 100644 index 00000000..d1fcb11d --- /dev/null +++ b/sdk/src/inputs/input-readme.ts @@ -0,0 +1,270 @@ +import type {InputCapabilityContext, InputCollectedContext, ReadmeFileKind, ReadmePrompt, RelativePath} from '../plugins/plugin-core' + +import process from 'node:process' + +import {CompilerDiagnosticError, ScopeError} from '@truenine/md-compiler/errors' +import { + collectAindexProjectSeriesProjectNameConflicts, + resolveAindexProjectSeriesConfigs +} from '@/aindex-project-series' +import {getGlobalConfigPath} from '@/ConfigLoader' +import { + buildConfigDiagnostic, + buildFileOperationDiagnostic, + buildPromptCompilerDiagnostic, + diagnosticLines +} from '@/diagnostics' +import {AbstractInputCapability, FilePathKind, PromptKind, README_FILE_KIND_MAP} from '../plugins/plugin-core' +import {assertNoResidualModuleSyntax} from '../plugins/plugin-core/DistPromptGuards' +import {readPromptArtifact} from '../plugins/plugin-core/PromptArtifactCache' +import {formatPromptCompilerDiagnostic} from '../plugins/plugin-core/PromptCompilerDiagnostics' + +const ALL_FILE_KINDS = Object.entries(README_FILE_KIND_MAP) as [ReadmeFileKind, {src: string, out: string}][] + +export class ReadmeMdInputCapability extends AbstractInputCapability { + constructor() { + super('ReadmeMdInputCapability', ['AindexInputCapability']) + } + + async collect(ctx: InputCapabilityContext): Promise> { + const {userConfigOptions: options, logger, fs, path, globalScope} = ctx + const {workspaceDir, aindexDir} = this.resolveBasePaths(options) + const readmePrompts: ReadmePrompt[] = [] + const projectSeries = resolveAindexProjectSeriesConfigs(options) + const projectRefs = projectSeries.flatMap(series => { + const seriesSourceDir = this.resolveAindexPath(series.src, aindexDir) + if (!(fs.existsSync(seriesSourceDir) && fs.statSync(seriesSourceDir).isDirectory())) return [] + + return fs + .readdirSync(seriesSourceDir, {withFileTypes: true}) + .filter(entry => entry.isDirectory()) + .map(entry => ({ + projectName: entry.name, + seriesName: series.name, + seriesDir: path.join(seriesSourceDir, entry.name) + })) + }) + const conflicts = collectAindexProjectSeriesProjectNameConflicts(projectRefs) + if (conflicts.length > 0) { + logger.error(buildConfigDiagnostic({ + code: 'README_PROJECT_SERIES_NAME_CONFLICT', + title: 'Readme project names must be unique across app, ext, arch, and softwares', + reason: diagnosticLines( + 'Readme-family outputs target bare workspace project directories, so app/ext/arch/softwares cannot reuse the same project directory name.', + `Conflicting project names: ${conflicts.map(conflict => conflict.projectName).join(', ')}` + ), + exactFix: diagnosticLines( + 'Rename the conflicting project directory in one of the app/ext/arch/softwares source trees and rerun tnmsc.' + ), + possibleFixes: conflicts.map(conflict => diagnosticLines( + `"${conflict.projectName}" is currently declared in: ${conflict.refs.map(ref => `${ref.seriesName} (${ref.seriesDir})`).join(', ')}` + )), + details: { + aindexDir, + conflicts: conflicts.map(conflict => ({ + projectName: conflict.projectName, + refs: conflict.refs.map(ref => ({ + seriesName: ref.seriesName, + seriesDir: ref.seriesDir + })) + })) + } + })) + + throw new Error('Readme project series name conflict') + } + + await Promise.all(projectSeries.map(async series => { + const aindexProjectsDir = this.resolveAindexPath(series.dist, aindexDir) + if (!(fs.existsSync(aindexProjectsDir) && fs.statSync(aindexProjectsDir).isDirectory())) { + logger.debug('aindex project series directory does not exist', {path: aindexProjectsDir, series: series.name}) + return + } + + try { + const projectEntries = fs + .readdirSync(aindexProjectsDir, {withFileTypes: true}) + .filter(entry => entry.isDirectory()) + .sort((a, b) => a.name.localeCompare(b.name)) + + for (const projectEntry of projectEntries) { + const projectName = projectEntry.name + const projectDir = path.join(aindexProjectsDir, projectName) + + await this.collectReadmeFiles( + ctx, + projectDir, + projectName, + workspaceDir, + '', + readmePrompts, + globalScope + ) + } + } + catch (e) { + logger.error(buildFileOperationDiagnostic({ + code: 'README_PROJECT_SCAN_FAILED', + title: `Failed to scan aindex ${series.name} projects for readme prompts`, + operation: 'scan', + targetKind: `aindex ${series.name} project directory`, + path: aindexProjectsDir, + error: e + })) + } + })) + + readmePrompts.sort((a, b) => { + const projectDiff = a.projectName.localeCompare(b.projectName) + if (projectDiff !== 0) return projectDiff + + const targetDiff = a.targetDir.path.localeCompare(b.targetDir.path) + if (targetDiff !== 0) return targetDiff + + return a.fileKind.localeCompare(b.fileKind) + }) + + return {readmePrompts} + } + + private async collectReadmeFiles( + ctx: InputCapabilityContext, + currentDir: string, + projectName: string, + workspaceDir: string, + relativePath: string, + readmePrompts: ReadmePrompt[], + globalScope: InputCapabilityContext['globalScope'] + ): Promise { + const {fs, path, logger} = ctx + const isRoot = relativePath === '' + + for (const [fileKind, {src}] of ALL_FILE_KINDS) { + const filePath = path.join(currentDir, src) + if (!fs.existsSync(filePath) || !fs.statSync(filePath).isFile()) continue + + try { + let content: string + try { + const artifact = await readPromptArtifact(filePath, { + mode: 'dist', + globalScope + }) + const {content: compiledContent} = artifact + content = compiledContent + assertNoResidualModuleSyntax(content, filePath) + } + catch (e) { + if (e instanceof CompilerDiagnosticError) { + logger.error(buildPromptCompilerDiagnostic({ + code: 'README_PROMPT_COMPILE_FAILED', + title: 'Failed to compile readme-family prompt', + diagnosticText: formatPromptCompilerDiagnostic(e, { + operation: 'Failed to compile readme-family prompt.', + promptKind: 'readme-family', + logicalName: `${projectName}/${src}`, + distPath: filePath + }), + details: { + promptKind: 'readme-family', + distPath: filePath, + projectName, + fileKind + } + })) + if (e instanceof ScopeError) { + const globalConfigPath = getGlobalConfigPath() + logger.error(buildConfigDiagnostic({ + code: 'README_SCOPE_VARIABLES_MISSING', + title: 'Readme-family prompt references missing config variables', + reason: diagnosticLines( + `The readme-family prompt uses scope variables that are not defined in "${globalConfigPath}".` + ), + configPath: globalConfigPath, + exactFix: diagnosticLines( + `Define the missing variables in "${globalConfigPath}" and rerun tnmsc.` + ), + details: { + promptPath: filePath, + errorMessage: e.message + } + })) + } + process.exit(1) + } + throw e + } + + // Readme-family outputs intentionally land in /. + // Cross-series duplicate project names are rejected earlier to keep this + // workspace mapping deterministic and overwrite-free. + const targetPath = isRoot ? projectName : path.join(projectName, relativePath) + + const targetDir: RelativePath = { + pathKind: FilePathKind.Relative, + path: targetPath, + basePath: workspaceDir, + getDirectoryName: () => isRoot ? projectName : path.basename(relativePath), + getAbsolutePath: () => path.resolve(workspaceDir, targetPath) + } + + const dir: RelativePath = { + pathKind: FilePathKind.Relative, + path: path.dirname(filePath), + basePath: workspaceDir, + getDirectoryName: () => path.basename(path.dirname(filePath)), + getAbsolutePath: () => path.dirname(filePath) + } + + readmePrompts.push({ + type: PromptKind.Readme, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + projectName, + targetDir, + isRoot, + fileKind, + markdownContents: [], + dir + }) + } + catch (e) { + logger.warn(buildFileOperationDiagnostic({ + code: 'README_PROMPT_READ_FAILED', + title: 'Failed to read readme-family file', + operation: 'read', + targetKind: 'readme-family prompt file', + path: filePath, + error: e, + details: { + fileKind + } + })) + } + } + + try { + const entries = fs.readdirSync(currentDir, {withFileTypes: true}) + + for (const entry of entries) { + if (entry.isDirectory()) { + const subRelativePath = isRoot ? entry.name : path.join(relativePath, entry.name) + const subDir = path.join(currentDir, entry.name) + + await this.collectReadmeFiles(ctx, subDir, projectName, workspaceDir, subRelativePath, readmePrompts, globalScope) + } + } + } + catch (e) { + logger.warn(buildFileOperationDiagnostic({ + code: 'README_DIRECTORY_SCAN_FAILED', + title: 'Failed to scan readme-family directory', + operation: 'scan', + targetKind: 'readme-family directory', + path: currentDir, + error: e + })) + } + } +} diff --git a/sdk/src/inputs/input-rule.test.ts b/sdk/src/inputs/input-rule.test.ts new file mode 100644 index 00000000..a91b2655 --- /dev/null +++ b/sdk/src/inputs/input-rule.test.ts @@ -0,0 +1,93 @@ +import type {InputCapabilityContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import {RuleInputCapability} from './input-rule' + +function createContext(tempWorkspace: string): InputCapabilityContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger: createLogger('RuleInputCapabilityTest', 'error'), + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputCapabilityContext +} + +describe('rule input plugin', () => { + it('fails hard when source exists without a compiled dist pair', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-rule-src-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'rules', 'qa') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.writeFileSync( + path.join(srcDir, 'boot.src.mdx'), + '---\ndescription: source only\nglobs:\n - "**/*.ts"\n---\nSource only rule', + 'utf8' + ) + + const plugin = new RuleInputCapability() + await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Missing compiled dist prompt') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('loads rules from dist when the source tree is missing', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-rule-dist-only-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const distDir = path.join(aindexDir, 'dist', 'rules', 'qa') + + try { + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync( + path.join(distDir, 'boot.mdx'), + '---\nscope: global\ndescription: Dist only rule\nglobs:\n - "**/*.ts"\n---\nDist only rule', + 'utf8' + ) + + const plugin = new RuleInputCapability() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.rules?.length ?? 0).toBe(1) + expect(result.rules?.[0]?.ruleName).toBe('boot') + expect(result.rules?.[0]?.content).toContain('Dist only rule') + expect(result.rules?.[0]?.scope).toBe('global') + expect(result.rules?.[0]?.globs).toEqual(['**/*.ts']) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('rejects workspace as an unsupported rule scope', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-rule-workspace-scope-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const distDir = path.join(aindexDir, 'dist', 'rules', 'qa') + + try { + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync( + path.join(distDir, 'boot.mdx'), + '---\nscope: workspace\ndescription: Dist only rule\nglobs:\n - "**/*.ts"\n---\nDist only rule', + 'utf8' + ) + + const plugin = new RuleInputCapability() + await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Field "scope" must be "project" or "global"') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/input-rule.ts b/sdk/src/inputs/input-rule.ts new file mode 100644 index 00000000..e0810657 --- /dev/null +++ b/sdk/src/inputs/input-rule.ts @@ -0,0 +1,103 @@ +import type { + InputCapabilityContext, + InputCollectedContext, + RulePrompt, + RuleScope, + RuleYAMLFrontMatter +} from '../plugins/plugin-core' +import {buildFileOperationDiagnostic} from '@/diagnostics' +import { + AbstractInputCapability, + createLocalizedPromptReader, + FilePathKind, + PromptKind, + SourceLocaleExtensions, + validateRuleMetadata +} from '../plugins/plugin-core' + +export class RuleInputCapability extends AbstractInputCapability { + constructor() { + super('RuleInputCapability') + } + + override async collect(ctx: InputCapabilityContext): Promise> { + const {userConfigOptions: options, logger, path, fs, globalScope} = ctx + const resolvedPaths = this.resolveBasePaths(options) + + const srcDir = this.resolveAindexPath(options.aindex.rules.src, resolvedPaths.aindexDir) + const distDir = this.resolveAindexPath(options.aindex.rules.dist, resolvedPaths.aindexDir) + + const reader = createLocalizedPromptReader(fs, path, logger, globalScope) + + const {prompts: localizedRulesFromSrc, errors} = await reader.readFlatFiles( + srcDir, + distDir, + { + kind: PromptKind.Rule, + localeExtensions: SourceLocaleExtensions, + hydrateSourceContents: false, + isDirectoryStructure: false, + createPrompt: async (content, _locale, name, metadata) => { + const yamlFrontMatter = metadata as RuleYAMLFrontMatter | undefined + const filePath = path.join(distDir, `${name}.mdx`) + if (yamlFrontMatter != null) { + const validation = validateRuleMetadata(yamlFrontMatter as Record, filePath) + if (!validation.valid) throw new Error(validation.errors.join('\n')) + } + const globs = yamlFrontMatter?.globs ?? [] + const scope: RuleScope = yamlFrontMatter?.scope ?? 'project' + const seriName = yamlFrontMatter?.seriName as string | undefined + const normalizedName = name.replaceAll('\\', '/') // Normalize path separator for cross-platform compatibility + const prefix = normalizedName.includes('/') ? normalizedName.split('/')[0] ?? '' : '' + const ruleName = normalizedName.split('/').pop() ?? normalizedName + + const rulePrompt = { + type: PromptKind.Rule, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: `${name}.mdx`, + basePath: distDir, + getDirectoryName: () => ruleName, + getAbsolutePath: () => filePath + }, + prefix, + ruleName, + globs, + scope, + markdownContents: [] + } as RulePrompt + + if (yamlFrontMatter != null) Object.assign(rulePrompt, {yamlFrontMatter}) + if (seriName != null) Object.assign(rulePrompt, {seriName}) + + return rulePrompt + } + } + ) + + for (const error of errors) { + logger.warn(buildFileOperationDiagnostic({ + code: 'RULE_PROMPT_READ_FAILED', + title: 'Failed to read rule prompt', + operation: error.phase === 'scan' ? 'scan' : 'read', + targetKind: 'rule prompt', + path: error.path, + error: error.error, + details: { + phase: error.phase + } + })) + } + + if (errors.length > 0) throw new Error(errors.map(error => error.error instanceof Error ? error.error.message : String(error.error)).join('\n')) + + return { + rules: localizedRulesFromSrc + .map(r => r.dist?.prompt) + .filter((rule): rule is RulePrompt => rule != null) + } + } +} diff --git a/sdk/src/inputs/input-shared-ignore.ts b/sdk/src/inputs/input-shared-ignore.ts new file mode 100644 index 00000000..0ed59d67 --- /dev/null +++ b/sdk/src/inputs/input-shared-ignore.ts @@ -0,0 +1,35 @@ +import type {AIAgentIgnoreConfigFile, InputCapabilityContext, InputCollectedContext} from '../plugins/plugin-core' +import {AbstractInputCapability} from '../plugins/plugin-core' +import {AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS, resolvePublicDefinitionPath} from '../public-config-paths' + +export class AIAgentIgnoreInputCapability extends AbstractInputCapability { + constructor() { + super('AIAgentIgnoreInputCapability') + } + + collect(ctx: InputCapabilityContext): Partial { + const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) + const results: AIAgentIgnoreConfigFile[] = [] + + for (const fileName of AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS) { + const filePath = resolvePublicDefinitionPath(aindexDir, fileName, { + command: ctx.runtimeCommand, + workspaceDir + }) + if (!ctx.fs.existsSync(filePath)) { + this.log.debug({action: 'collect', message: 'Ignore file not found', path: filePath}) + continue + } + const content = ctx.fs.readFileSync(filePath, 'utf8') + if (content.length === 0) { + this.log.debug({action: 'collect', message: 'Ignore file is empty', path: filePath}) + continue + } + results.push({fileName, content, sourcePath: filePath}) + this.log.debug({action: 'collect', message: 'Loaded ignore file', path: filePath, fileName}) + } + + if (results.length === 0) return {} + return {aiAgentIgnoreConfigFiles: results} + } +} diff --git a/sdk/src/inputs/input-subagent.test.ts b/sdk/src/inputs/input-subagent.test.ts new file mode 100644 index 00000000..6567c128 --- /dev/null +++ b/sdk/src/inputs/input-subagent.test.ts @@ -0,0 +1,224 @@ +import type {InputCapabilityContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import {SubAgentInputCapability} from './input-subagent' + +function createContext(tempWorkspace: string): InputCapabilityContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger: createLogger('SubAgentInputCapabilityTest', 'error'), + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputCapabilityContext +} + +describe('subagent input plugin', () => { + it('prefers dist content, and dist also runs mdx-to-md compilation', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'subagents') + const distDir = path.join(aindexDir, 'dist', 'subagents') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + + const srcFile = path.join(srcDir, 'demo.src.mdx') + const distFile = path.join(distDir, 'demo.mdx') + fs.writeFileSync(srcFile, '---\ndescription: src\n---\nSubAgent source', 'utf8') + fs.writeFileSync(distFile, '---\ndescription: dist\n---\nexport const x = 1\n\nSubAgent dist', 'utf8') + + const plugin = new SubAgentInputCapability() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.subAgents?.length ?? 0).toBe(1) + expect(result.subAgents?.[0]?.agentName).toBe('demo') + expect(result.subAgents?.[0]?.canonicalName).toBe('demo') + expect(result.subAgents?.[0]?.content).toContain('SubAgent dist') + expect(result.subAgents?.[0]?.content).not.toContain('SubAgent source') + expect(result.subAgents?.[0]?.content).not.toContain('export const x = 1') + expect(result.subAgents?.[0]?.yamlFrontMatter?.description).toBe('dist') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('extracts directory name as subagent prefix', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-prefix-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'subagents', 'qa') + const distDir = path.join(aindexDir, 'dist', 'subagents', 'qa') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + + const srcFile = path.join(srcDir, 'boot.src.mdx') + const distFile = path.join(distDir, 'boot.mdx') + fs.writeFileSync(srcFile, '---\ndescription: qa boot src\n---\nSubAgent source', 'utf8') + fs.writeFileSync(distFile, '---\ndescription: qa boot dist\n---\nSubAgent dist', 'utf8') + + const plugin = new SubAgentInputCapability() + const result = await plugin.collect(createContext(tempWorkspace)) + const [subAgent] = result.subAgents ?? [] + + expect(result.subAgents?.length ?? 0).toBe(1) + expect(subAgent?.agentPrefix).toBe('qa') + expect(subAgent?.agentName).toBe('boot') + expect(subAgent?.canonicalName).toBe('qa-boot') + expect(subAgent?.content).toContain('SubAgent dist') + expect(subAgent?.content).not.toContain('SubAgent source') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('keeps rawMdxContent from dist for output-side recompilation', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-rawmdx-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'subagents') + const distDir = path.join(aindexDir, 'dist', 'subagents') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + + const srcFile = path.join(srcDir, 'demo.src.mdx') + const distFile = path.join(distDir, 'demo.mdx') + fs.writeFileSync(srcFile, '---\ndescription: src\n---\nSubAgent source', 'utf8') + fs.writeFileSync(distFile, '---\ndescription: dist\n---\nexport const x = 1\n\nSubAgent dist', 'utf8') + + const plugin = new SubAgentInputCapability() + const result = await plugin.collect(createContext(tempWorkspace)) + const [subAgent] = result.subAgents ?? [] + + expect(subAgent?.rawMdxContent).toContain('export const x = 1') + expect(subAgent?.content).toContain('SubAgent dist') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('loads subagents from dist when the source tree is missing', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-dist-only-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const distDir = path.join(aindexDir, 'dist', 'subagents') + + try { + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync( + path.join(distDir, 'demo.mdx'), + '---\ndescription: dist only\n---\nDist only subagent', + 'utf8' + ) + + const plugin = new SubAgentInputCapability() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.subAgents?.length ?? 0).toBe(1) + expect(result.subAgents?.[0]?.agentName).toBe('demo') + expect(result.subAgents?.[0]?.canonicalName).toBe('demo') + expect(result.subAgents?.[0]?.content).toContain('Dist only subagent') + expect(result.subAgents?.[0]?.yamlFrontMatter?.description).toBe('dist only') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('fails hard when source exists without a compiled dist pair', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-source-only-test-')) + const srcDir = path.join(tempWorkspace, 'aindex', 'subagents') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.writeFileSync( + path.join(srcDir, 'demo.src.mdx'), + '---\ndescription: source only\n---\nSource only subagent', + 'utf8' + ) + + const plugin = new SubAgentInputCapability() + await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Missing compiled dist prompt') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('rejects workspace as an unsupported subagent scope', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-workspace-scope-test-')) + const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'subagents') + + try { + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync( + path.join(distDir, 'demo.mdx'), + '---\ndescription: dist only\nscope: workspace\n---\nDist only subagent', + 'utf8' + ) + + const plugin = new SubAgentInputCapability() + await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Field "scope" must be "project" or "global"') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('warns and ignores authored subagent names', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-name-warning-test-')) + const warnings: string[] = [] + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'subagents', 'qa') + const distDir = path.join(aindexDir, 'dist', 'subagents', 'qa') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + + fs.writeFileSync(path.join(srcDir, 'boot.src.mdx'), '---\nname: review-helper\ndescription: src\n---\nSubAgent source', 'utf8') + fs.writeFileSync(path.join(distDir, 'boot.mdx'), '---\nname: review-helper\ndescription: dist\n---\nSubAgent dist', 'utf8') + + const logger = { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: diagnostic => warnings.push(diagnostic.code), + error: () => {}, + fatal: () => {} + } + + const options = mergeConfig({workspaceDir: tempWorkspace}) + const plugin = new SubAgentInputCapability() + const result = await plugin.collect({ + logger, + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputCapabilityContext) + + const [subAgent] = result.subAgents ?? [] + expect(subAgent?.canonicalName).toBe('qa-boot') + expect('name' in (subAgent?.yamlFrontMatter ?? {})).toBe(false) + expect(warnings).toContain('SUBAGENT_NAME_IGNORED') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/inputs/input-subagent.ts b/sdk/src/inputs/input-subagent.ts new file mode 100644 index 00000000..ebbc0b06 --- /dev/null +++ b/sdk/src/inputs/input-subagent.ts @@ -0,0 +1,179 @@ +import type { + InputCapabilityContext, + InputCollectedContext, + Locale, + SubAgentPrompt, + SubAgentYAMLFrontMatter +} from '../plugins/plugin-core' +import {buildConfigDiagnostic, buildFileOperationDiagnostic, diagnosticLines} from '@/diagnostics' +import { + AbstractInputCapability, + createLocalizedPromptReader, + deriveSubAgentIdentity, + FilePathKind, + PromptKind, + SourceLocaleExtensions, + validateSubAgentMetadata + +} from '../plugins/plugin-core' + +export class SubAgentInputCapability extends AbstractInputCapability { + constructor() { + super('SubAgentInputCapability') + } + + private createSubAgentPrompt( + content: string, + _locale: Locale, + name: string, + srcDir: string, + distDir: string, + ctx: InputCapabilityContext, + metadata?: Record, + warnedDerivedNames?: Set + ): SubAgentPrompt { + const {fs, logger, path} = ctx + const {agentPrefix, agentName, canonicalName} = deriveSubAgentIdentity(name) + + const filePath = path.join(distDir, `${name}.mdx`) + const entryName = `${name}.mdx` + const sourceFilePath = fs.existsSync(path.join(srcDir, `${name}.src.mdx`)) + ? path.join(srcDir, `${name}.src.mdx`) + : filePath + const yamlFrontMatter = metadata == null + ? void 0 + : (() => { + const frontMatter = {...metadata} + const authoredName = frontMatter['name'] + + if (typeof authoredName === 'string' && authoredName.trim().length > 0 && warnedDerivedNames?.has(sourceFilePath) !== true) { + warnedDerivedNames?.add(sourceFilePath) + logger.warn(buildConfigDiagnostic({ + code: 'SUBAGENT_NAME_IGNORED', + title: 'Sub-agent authored name is ignored', + reason: diagnosticLines( + `tnmsc ignores the authored sub-agent name "${authoredName}" in favor of the derived path name "${canonicalName}".` + ), + configPath: sourceFilePath, + exactFix: diagnosticLines( + 'Remove the `name` field from the sub-agent front matter or exported metadata.', + 'Rename the sub-agent directory or file if you need a different sub-agent name.' + ), + details: { + authoredName, + derivedName: canonicalName, + logicalName: name + } + })) + } + + delete frontMatter['name'] + return frontMatter as SubAgentYAMLFrontMatter + })() + + const prompt: SubAgentPrompt = { + type: PromptKind.SubAgent, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: entryName, + basePath: distDir, + getDirectoryName: () => entryName.replace(/\.mdx$/, ''), + getAbsolutePath: () => filePath + }, + ...agentPrefix != null && {agentPrefix}, + agentName, + canonicalName + } as SubAgentPrompt + + if (yamlFrontMatter == null) return prompt + + const validation = validateSubAgentMetadata(yamlFrontMatter as Record, filePath) + if (!validation.valid) throw new Error(validation.errors.join('\n')) + + Object.assign(prompt, {yamlFrontMatter}) + if (yamlFrontMatter.seriName != null) Object.assign(prompt, {seriName: yamlFrontMatter.seriName}) + return prompt + } + + override async collect(ctx: InputCapabilityContext): Promise> { + const {userConfigOptions: options, logger, path, fs, globalScope} = ctx + const resolvedPaths = this.resolveBasePaths(options) + + const srcDir = this.resolveAindexPath(options.aindex.subAgents.src, resolvedPaths.aindexDir) + const distDir = this.resolveAindexPath(options.aindex.subAgents.dist, resolvedPaths.aindexDir) + + logger.debug('SubAgentInputCapability collecting', { + srcDir, + distDir, + aindexDir: resolvedPaths.aindexDir + }) + + const reader = createLocalizedPromptReader(fs, path, logger, globalScope) + const warnedDerivedNames = new Set() + + const {prompts: localizedSubAgents, errors} = await reader.readFlatFiles( + srcDir, + distDir, + { + kind: PromptKind.SubAgent, + localeExtensions: SourceLocaleExtensions, + hydrateSourceContents: false, + isDirectoryStructure: false, + createPrompt: (content, locale, name, metadata) => this.createSubAgentPrompt( + content, + locale, + name, + srcDir, + distDir, + ctx, + metadata, + warnedDerivedNames + ) + } + ) + + logger.debug('SubAgentInputCapability read complete', { + subAgentCount: localizedSubAgents.length, + errorCount: errors.length + }) + + for (const error of errors) { + logger.warn(buildFileOperationDiagnostic({ + code: 'SUBAGENT_PROMPT_READ_FAILED', + title: 'Failed to read sub-agent prompt', + operation: error.phase === 'scan' ? 'scan' : 'read', + targetKind: 'sub-agent prompt', + path: error.path, + error: error.error, + details: { + phase: error.phase + } + })) + } + + if (errors.length > 0) throw new Error(errors.map(error => error.error instanceof Error ? error.error.message : String(error.error)).join('\n')) + + const flatSubAgents: SubAgentPrompt[] = [] + for (const localized of localizedSubAgents) { + const distContent = localized.dist + if (distContent?.prompt == null) continue + + const {prompt: distPrompt, rawMdx} = distContent + flatSubAgents.push(rawMdx == null + ? distPrompt + : {...distPrompt, rawMdxContent: rawMdx}) + } + + logger.debug('SubAgentInputCapability flattened subAgents', { + count: flatSubAgents.length, + agents: flatSubAgents.map(a => a.canonicalName) + }) + + return { + subAgents: flatSubAgents + } + } +} diff --git a/sdk/src/inputs/input-vscode-config.ts b/sdk/src/inputs/input-vscode-config.ts new file mode 100644 index 00000000..5476237e --- /dev/null +++ b/sdk/src/inputs/input-vscode-config.ts @@ -0,0 +1,27 @@ +import type {InputCapabilityContext, InputCollectedContext, ProjectIDEConfigFile} from '../plugins/plugin-core' +import {AbstractInputCapability, IDEKind} from '../plugins/plugin-core' +import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' + +export class VSCodeConfigInputCapability extends AbstractInputCapability { + constructor() { + super('VSCodeConfigInputCapability') + } + + collect(ctx: InputCapabilityContext): Partial { + const {userConfigOptions, fs} = ctx + const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) + + const files = ['.vscode/settings.json', '.vscode/extensions.json'] + const vscodeConfigFiles: ProjectIDEConfigFile[] = [] + + for (const relativePath of files) { + const file = readPublicIdeConfigDefinitionFile(IDEKind.VSCode, relativePath, aindexDir, fs, { + command: ctx.runtimeCommand, + workspaceDir + }) + if (file != null) vscodeConfigFiles.push(file) + } + + return {vscodeConfigFiles} + } +} diff --git a/sdk/src/inputs/input-workspace.ts b/sdk/src/inputs/input-workspace.ts new file mode 100644 index 00000000..dfc10863 --- /dev/null +++ b/sdk/src/inputs/input-workspace.ts @@ -0,0 +1,28 @@ +import type {InputCapabilityContext, InputCollectedContext, Workspace} from '../plugins/plugin-core' +import * as path from 'node:path' +import {AbstractInputCapability, FilePathKind} from '../plugins/plugin-core' + +export class WorkspaceInputCapability extends AbstractInputCapability { + constructor() { + super('WorkspaceInputCapability') + } + + collect(ctx: InputCapabilityContext): Partial { + const {userConfigOptions: options} = ctx + const {workspaceDir, aindexDir} = this.resolveBasePaths(options) + + const workspace: Workspace = { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir) + }, + projects: [] + } + + return { + workspace, + aindexDir + } + } +} diff --git a/sdk/src/inputs/input-zed-config.ts b/sdk/src/inputs/input-zed-config.ts new file mode 100644 index 00000000..32642374 --- /dev/null +++ b/sdk/src/inputs/input-zed-config.ts @@ -0,0 +1,23 @@ +import type {InputCapabilityContext, InputCollectedContext, ProjectIDEConfigFile} from '../plugins/plugin-core' +import {AbstractInputCapability, IDEKind} from '../plugins/plugin-core' +import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' + +export class ZedConfigInputCapability extends AbstractInputCapability { + constructor() { + super('ZedConfigInputCapability') + } + + collect(ctx: InputCapabilityContext): Partial { + const {userConfigOptions, fs} = ctx + const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) + + const zedConfigFiles: ProjectIDEConfigFile[] = [] + const file = readPublicIdeConfigDefinitionFile(IDEKind.Zed, '.zed/settings.json', aindexDir, fs, { + command: ctx.runtimeCommand, + workspaceDir + }) + if (file != null) zedConfigFiles.push(file) + + return {zedConfigFiles} + } +} diff --git a/sdk/src/inputs/runtime.ts b/sdk/src/inputs/runtime.ts new file mode 100644 index 00000000..710fe2f8 --- /dev/null +++ b/sdk/src/inputs/runtime.ts @@ -0,0 +1,172 @@ +import type {MdxGlobalScope} from '@truenine/md-compiler/globals' +import type { + InputCapability, + InputCapabilityContext, + InputCollectedContext, + PluginOptions, + UserConfigFile +} from '@/plugins/plugin-core' + +import * as fs from 'node:fs' +import * as path from 'node:path' +import {createLogger} from '@truenine/logger' +import glob from 'fast-glob' +import { + AIAgentIgnoreInputCapability, + AindexInputCapability, + CommandInputCapability, + EditorConfigInputCapability, + GitExcludeInputCapability, + GitIgnoreInputCapability, + GlobalMemoryInputCapability, + JetBrainsConfigInputCapability, + MarkdownWhitespaceCleanupEffectInputCapability, + OrphanFileCleanupEffectInputCapability, + ProjectPromptInputCapability, + ReadmeMdInputCapability, + RuleInputCapability, + SkillDistCleanupEffectInputCapability, + SkillInputCapability, + SubAgentInputCapability, + VSCodeConfigInputCapability, + WorkspaceInputCapability, + ZedConfigInputCapability +} from '@/inputs' +import {extractUserArgs, parseArgs} from '@/pipeline/CliArgumentParser' +import {buildDependencyContext, mergeContexts} from '@/pipeline/ContextMerger' +import {topologicalSort} from '@/pipeline/DependencyResolver' +import {GlobalScopeCollector, ScopePriority, ScopeRegistry} from '@/plugins/plugin-core/GlobalScopeCollector' + +export interface InputRuntimeOptions { + readonly pipelineArgs?: readonly string[] + readonly userConfigOptions: Required + readonly userConfig?: UserConfigFile + readonly capabilities?: readonly InputCapability[] + readonly includeBuiltinEffects?: boolean +} + +function createBuiltinInputEffectCapabilities(): InputCapability[] { + return [ + new SkillDistCleanupEffectInputCapability(), + new OrphanFileCleanupEffectInputCapability(), + new MarkdownWhitespaceCleanupEffectInputCapability() + ] +} + +function createBuiltinInputReaderCapabilities(): InputCapability[] { + return [ + new WorkspaceInputCapability(), + new AindexInputCapability(), + new VSCodeConfigInputCapability(), + new ZedConfigInputCapability(), + new JetBrainsConfigInputCapability(), + new EditorConfigInputCapability(), + new SkillInputCapability(), + new CommandInputCapability(), + new SubAgentInputCapability(), + new RuleInputCapability(), + new GlobalMemoryInputCapability(), + new ProjectPromptInputCapability(), + new ReadmeMdInputCapability(), + new GitIgnoreInputCapability(), + new GitExcludeInputCapability(), + new AIAgentIgnoreInputCapability() + ] +} + +export function resolveRuntimeCommand( + pipelineArgs?: readonly string[] +): InputCapabilityContext['runtimeCommand'] { + if (pipelineArgs == null || pipelineArgs.length === 0) return 'execute' + + const filteredArgs = pipelineArgs.filter((arg): arg is string => arg != null) + const userArgs = extractUserArgs(filteredArgs) + const args = parseArgs(userArgs) + + if (args.helpFlag || args.versionFlag || args.unknownCommand != null) return void 0 + if (args.subcommand === 'clean') return 'clean' + if (args.subcommand === 'plugins') return 'plugins' + if (args.subcommand === 'dry-run' || args.dryRun) return 'dry-run' + if (args.subcommand == null) return 'execute' + return void 0 +} + +export async function collectInputContext( + options: InputRuntimeOptions +): Promise> { + const { + pipelineArgs, + userConfigOptions, + userConfig, + capabilities, + includeBuiltinEffects = true + } = options + const logger = createLogger('InputRuntime', userConfigOptions.logLevel) + const runtimeCommand = resolveRuntimeCommand(pipelineArgs) + const baseCtx: Omit = { + logger, + userConfigOptions, + fs, + path, + glob + } + + const resolvedCapabilities = topologicalSort([ + ...includeBuiltinEffects ? createBuiltinInputEffectCapabilities() : [], + ...capabilities ?? createBuiltinInputReaderCapabilities() + ]) + const globalScopeCollector = new GlobalScopeCollector({userConfig}) + const globalScope: MdxGlobalScope = globalScopeCollector.collect() + const scopeRegistry = new ScopeRegistry() + scopeRegistry.setGlobalScope(globalScope) + + logger.debug('global scope collected', { + osInfo: { + platform: globalScope.os.platform, + arch: globalScope.os.arch, + shellKind: globalScope.os.shellKind + }, + hasProfile: Object.keys(globalScope.profile).length > 0, + hasTool: Object.keys(globalScope.tool).length > 0 + }) + + const outputsByCapability = new Map>() + let accumulatedContext: Partial = {} + + for (const capability of resolvedCapabilities) { + const dependencyContext = buildDependencyContext(capability, outputsByCapability, mergeContexts) + const ctx: InputCapabilityContext = { + ...baseCtx, + dependencyContext, + ...runtimeCommand != null ? {runtimeCommand} : {}, + globalScope, + scopeRegistry + } + + const capabilityWithEffects = capability as InputCapability & { + executeEffects?: (ctx: InputCapabilityContext, dryRun: boolean) => Promise + } + if (capabilityWithEffects.executeEffects != null) await capabilityWithEffects.executeEffects(ctx, false) + + const output = await capability.collect(ctx) + outputsByCapability.set(capability.name, output) + accumulatedContext = mergeContexts(accumulatedContext, output) + + const capabilityWithScopes = capability as InputCapability & { + getRegisteredScopes?: () => readonly {namespace: string, values: Record}[] + } + if (capabilityWithScopes.getRegisteredScopes != null) { + const registeredScopes = capabilityWithScopes.getRegisteredScopes() + for (const {namespace, values} of registeredScopes) { + scopeRegistry.register(namespace, values, ScopePriority.PluginRegistered) + logger.debug('input capability scope registered', { + capability: capability.name, + namespace, + keys: Object.keys(values) + }) + } + } + } + + return accumulatedContext +} diff --git a/sdk/src/lib.rs b/sdk/src/lib.rs new file mode 100644 index 00000000..e2d9bf5b --- /dev/null +++ b/sdk/src/lib.rs @@ -0,0 +1,546 @@ +//! tnmsc library — exposes core functionality for GUI backend direct invocation. +//! +//! Pure Rust commands: version, load_config, config_show +//! Bridge commands (Node.js): run_bridge_command + +pub mod bridge; +pub mod commands; +pub mod core; +pub(crate) mod diagnostic_helpers; + +use std::path::Path; + +use serde::{Deserialize, Serialize}; + +/// Unified error type for CLI library API. +#[derive(Debug, thiserror::Error)] +pub enum CliError { + #[error("Node.js not found in PATH")] + NodeNotFound, + + #[error("Plugin runtime not found: {0}")] + PluginRuntimeNotFound(String), + + #[error("Node.js process failed with exit code {code}: {stderr}")] + NodeProcessFailed { code: i32, stderr: String }, + + #[error("Config error: {0}")] + ConfigError(String), + + #[error("IO error: {0}")] + IoError(#[from] std::io::Error), + + #[error("Serialization error: {0}")] + SerializationError(#[from] serde_json::Error), +} + +/// Captured output from a bridge command (execute, dry-run, clean, plugins). +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BridgeCommandResult { + pub stdout: String, + pub stderr: String, + pub exit_code: i32, +} + +// --------------------------------------------------------------------------- +// Public API functions +// --------------------------------------------------------------------------- + +/// Return the CLI crate version string. +pub fn version() -> &'static str { + env!("CARGO_PKG_VERSION") +} + +/// Load and merge configuration from the canonical global config path. +pub fn load_config(cwd: &Path) -> Result { + core::config::ConfigLoader::with_defaults() + .try_load(cwd) + .map_err(CliError::ConfigError) +} + +/// Return the merged global configuration as a pretty-printed JSON string. +pub fn config_show(cwd: &Path) -> Result { + let result = core::config::ConfigLoader::with_defaults() + .try_load(cwd) + .map_err(CliError::ConfigError)?; + serde_json::to_string_pretty(&result.config).map_err(CliError::from) +} + +/// Execute a bridge command (execute, dry-run, clean, plugins) via Node.js subprocess. +/// +/// The subprocess output is captured (piped) and returned as a [`BridgeCommandResult`]. +pub fn run_bridge_command( + subcommand: &str, + cwd: &Path, + json_mode: bool, + extra_args: &[&str], +) -> Result { + bridge::node::run_node_command_captured(subcommand, cwd, json_mode, extra_args) +} + +// --------------------------------------------------------------------------- +// Property-based tests — Property 1: Library API returns typed results +// --------------------------------------------------------------------------- +#[cfg(test)] +mod property_tests { + use super::*; + use proptest::prelude::*; + use tempfile::TempDir; + + /// **Validates: Requirements 1.4, 1.5** + /// **Feature: gui-direct-cli-crate, Property 1: Library API returns typed results** + + // ---- version() ---- + + #[test] + fn version_returns_cargo_pkg_version() { + let v = version(); + assert!(!v.is_empty(), "version() must return a non-empty string"); + assert_eq!(v, env!("CARGO_PKG_VERSION")); + } + + proptest! { + /// version() always returns a non-empty &'static str that matches CARGO_PKG_VERSION, + /// regardless of how many times it is called. + #[test] + fn prop_version_always_non_empty(_seed in 0u64..10000) { + let v = version(); + prop_assert!(!v.is_empty(), "version() returned empty string"); + prop_assert_eq!(v, env!("CARGO_PKG_VERSION")); + } + + // ---- load_config(cwd) ---- + + /// For any temporary directory, load_config returns Ok(MergedConfigResult) + /// because ConfigLoader has defaults and doesn't fail on missing config files. + #[test] + fn prop_load_config_returns_ok_for_any_tempdir(_seed in 0u64..100) { + let tmp = TempDir::new().expect("failed to create tempdir"); + let result = load_config(tmp.path()); + prop_assert!(result.is_ok(), "load_config should return Ok for any valid dir, got: {:?}", result.err()); + let merged = result.unwrap(); + prop_assert!(merged.sources.is_empty() || !merged.sources.is_empty(), + "sources should be a valid Vec"); + } + + // ---- config_show(cwd) ---- + + /// For any temporary directory, config_show returns Ok(String) containing valid JSON. + #[test] + fn prop_config_show_returns_valid_json(_seed in 0u64..100) { + let tmp = TempDir::new().expect("failed to create tempdir"); + let result = config_show(tmp.path()); + prop_assert!(result.is_ok(), "config_show should return Ok, got: {:?}", result.err()); + let json_str = result.unwrap(); + let parsed: Result = serde_json::from_str(&json_str); + prop_assert!(parsed.is_ok(), "config_show output should be valid JSON, got: {}", json_str); + } + + // ---- BridgeCommandResult structural property ---- + + /// BridgeCommandResult fields are typed and accessible for any combination of + /// stdout/stderr/exit_code values. Verifies Property 1 for the result struct + /// without spawning any processes. + /// + /// **Feature: gui-direct-cli-crate, Property 1: Library API returns typed results** + #[test] + fn prop_bridge_command_result_fields_are_typed( + stdout in ".*", + stderr in ".*", + exit_code in proptest::num::i32::ANY, + ) { + let bcr = BridgeCommandResult { + stdout: stdout.clone(), + stderr: stderr.clone(), + exit_code, + }; + // Typed field access — verifies the struct is not a raw string wrapper + let s: &str = &bcr.stdout; + let e: &str = &bcr.stderr; + let c: i32 = bcr.exit_code; + prop_assert_eq!(s, stdout.as_str()); + prop_assert_eq!(e, stderr.as_str()); + prop_assert_eq!(c, exit_code); + // Verify round-trip JSON serialization (camelCase fields per serde rename_all) + let json = serde_json::to_string(&bcr).expect("BridgeCommandResult must serialize"); + prop_assert!(json.contains("\"stdout\""), "JSON must contain stdout field"); + prop_assert!(json.contains("\"stderr\""), "JSON must contain stderr field"); + prop_assert!(json.contains("\"exitCode\""), "JSON must contain exitCode field (camelCase)"); + // Verify round-trip deserialization + let bcr2: BridgeCommandResult = + serde_json::from_str(&json).expect("BridgeCommandResult must deserialize"); + prop_assert_eq!(bcr2.stdout.as_str(), stdout.as_str()); + prop_assert_eq!(bcr2.stderr.as_str(), stderr.as_str()); + prop_assert_eq!(bcr2.exit_code, exit_code); + } + } + + // ---- CliError pattern matching exhaustiveness ---- + + #[test] + fn cli_error_variants_are_matchable() { + let errors: Vec = vec![ + CliError::NodeNotFound, + CliError::PluginRuntimeNotFound("test".into()), + CliError::NodeProcessFailed { + code: 1, + stderr: "fail".into(), + }, + CliError::ConfigError("bad config".into()), + CliError::IoError(std::io::Error::new(std::io::ErrorKind::NotFound, "test")), + CliError::SerializationError(serde_json::from_str::("invalid").unwrap_err()), + ]; + + for err in &errors { + match err { + CliError::NodeNotFound => assert!(err.to_string().contains("Node.js")), + CliError::PluginRuntimeNotFound(msg) => assert!(!msg.is_empty()), + CliError::NodeProcessFailed { code, stderr } => { + assert_eq!(*code, 1); + assert!(!stderr.is_empty()); + } + CliError::ConfigError(msg) => assert!(!msg.is_empty()), + CliError::IoError(e) => assert!(!e.to_string().is_empty()), + CliError::SerializationError(e) => assert!(!e.to_string().is_empty()), + } + } + } + + /// Single environment probe: verifies run_bridge_command returns a typed Result. + /// Runs once (not in proptest) to avoid spawning Node.js hundreds of times. + /// If Node.js is not found, returns NodeNotFound. + /// If plugin-runtime.mjs is not found, returns PluginRuntimeNotFound. + /// Both are typed CliError variants — no panics, no raw strings. + /// + /// **Feature: gui-direct-cli-crate, Property 1: Library API returns typed results** + #[test] + fn run_bridge_command_returns_typed_result_or_typed_error() { + // Only probe the environment — do not spawn a real subcommand that may hang. + // We check find_node/find_plugin_runtime directly to verify the typed error path. + let node_available = bridge::node::find_node().is_some(); + let runtime_available = bridge::node::find_plugin_runtime().is_some(); + + if !node_available { + // Verify NodeNotFound is returned as a typed error + let tmp = tempfile::TempDir::new().unwrap(); + let result = run_bridge_command("version", tmp.path(), false, &[]); + assert!( + matches!(result, Err(CliError::NodeNotFound)), + "expected NodeNotFound when node is absent, got: {:?}", + result + ); + } else if !runtime_available { + // Verify PluginRuntimeNotFound is returned as a typed error + let tmp = tempfile::TempDir::new().unwrap(); + let result = run_bridge_command("version", tmp.path(), false, &[]); + assert!( + matches!(result, Err(CliError::PluginRuntimeNotFound(_))), + "expected PluginRuntimeNotFound when runtime is absent, got: {:?}", + result + ); + } else { + // Both available — verify the function signature compiles and returns Result + // We do NOT actually spawn a process here to avoid hanging on unknown subcommands. + // The typed return type is verified at compile time. + let _: fn(&str, &Path, bool, &[&str]) -> Result = + run_bridge_command; + } + } +} + +// --------------------------------------------------------------------------- +// Property-based tests — Property 3: Bridge command respects working directory +// --------------------------------------------------------------------------- +#[cfg(test)] +mod property_tests_cwd { + use super::*; + use proptest::prelude::*; + use tempfile::TempDir; + + // Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory + // Validates: Requirement 5.5 + // + // Property: For any valid filesystem path passed as `cwd` to `run_bridge_command`, + // the Node.js subprocess's working directory is set to that path. + // + // Testing strategy: + // - Create a real temporary directory (guarantees the path exists on disk). + // - Call `run_bridge_command` with that directory as `cwd`. + // - The key invariant: the error returned (if any) must be about Node.js or the + // plugin runtime being unavailable — NOT an IoError about the cwd being invalid. + // - An IoError whose kind is NotFound/PermissionDenied on the cwd itself would + // indicate the path was silently ignored or incorrectly passed to `current_dir`. + // - If Node.js IS available and the runtime IS found, the process runs in the + // given directory (verified by the absence of any cwd-related IoError). + + /// Helper: determine whether an error is a cwd-related IoError. + /// + /// `std::process::Command::current_dir` fails at spawn time with an IoError + /// when the directory does not exist or is not accessible. We distinguish + /// this from the expected "Node.js not found" / "runtime not found" errors. + fn is_cwd_io_error(err: &CliError) -> bool { + match err { + CliError::IoError(io_err) => { + // An IoError caused by a bad cwd typically surfaces as NotFound or + // PermissionDenied at the OS level when spawning the child process. + // We conservatively flag *any* IoError as a potential cwd problem + // so the test catches regressions where cwd is not forwarded. + matches!( + io_err.kind(), + std::io::ErrorKind::NotFound | std::io::ErrorKind::PermissionDenied + ) + } + _ => false, + } + } + + /// Probe the environment once so proptest iterations can skip actual spawning + /// when both Node.js and the plugin runtime are present (to avoid hanging). + fn node_available() -> bool { + bridge::node::find_node().is_some() + } + + fn runtime_available() -> bool { + bridge::node::find_plugin_runtime().is_some() + } + + proptest! { + // Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory + // Validates: Requirement 5.5 + // + // For any real temporary directory, calling run_bridge_command with that directory + // as `cwd` must NOT produce a cwd-related IoError. The only acceptable errors are + // NodeNotFound or PluginRuntimeNotFound — both indicate the cwd was accepted and + // forwarded correctly to the subprocess builder; the failure is about runtime + // availability, not about the working directory itself. + // + // When both Node.js and the plugin runtime are present the test verifies the + // property structurally (via source inspection) rather than by actually spawning + // a long-running process, to keep the test suite fast and deterministic. + #[test] + fn prop_bridge_command_cwd_is_forwarded_not_ignored(_seed in 0u64..100u64) { + // Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory + // Validates: Requirement 5.5 + let tmp = TempDir::new().expect("failed to create temp dir"); + let cwd = tmp.path(); + + // The directory must exist before we pass it to run_bridge_command. + prop_assert!(cwd.exists(), "temp dir must exist: {:?}", cwd); + prop_assert!(cwd.is_dir(), "temp dir must be a directory: {:?}", cwd); + + // When both node and runtime are available, spawning "execute" would block + // waiting for the plugin pipeline. Instead we verify the property by + // confirming that run_node_command_captured sets current_dir via the + // PluginRuntimeNotFound path: we use a non-existent runtime path scenario + // by checking the function signature and the source-level guarantee that + // `cmd.current_dir(cwd)` is called before `cmd.output()`. + // + // The structural guarantee is: in run_node_command_captured the line + // cmd.current_dir(cwd); + // appears unconditionally before cmd.output(), so any error from output() + // is never a "cwd was ignored" error. + if node_available() && runtime_available() { + // Verify the function accepts the cwd type without panicking. + // The compile-time type check is the strongest guarantee here. + let _: &std::path::Path = cwd; + // Property holds by construction — current_dir is always set. + return Ok(()); + } + + let result = run_bridge_command("execute", cwd, true, &[]); + + match result { + Ok(_) => { + // Node.js ran successfully in the given cwd — property holds. + } + Err(CliError::NodeNotFound) => { + // Node.js is not installed in this environment. + // The cwd was accepted (passed to Command::current_dir) before the + // NodeNotFound check, so the property still holds. + } + Err(CliError::PluginRuntimeNotFound(_)) => { + // Node.js found but plugin-runtime.mjs is absent. + // Again, cwd was accepted — property holds. + } + Err(CliError::NodeProcessFailed { .. }) => { + // Node.js ran but exited non-zero (e.g. runtime error). + // The process was launched with the correct cwd — property holds. + } + Err(ref err) if is_cwd_io_error(err) => { + // An IoError that looks like a bad working directory — property FAILS. + prop_assert!( + false, + "run_bridge_command returned a cwd-related IoError for an existing \ + directory {:?}: {:?}", + cwd, + err + ); + } + Err(_) => { + // Any other error (ConfigError, SerializationError, non-cwd IoError) + // is unrelated to the working directory — property holds. + } + } + } + } + + /// Deterministic unit test: creates N distinct temp dirs and verifies that + /// run_bridge_command never returns a cwd-related IoError for any of them. + /// + /// Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory + /// Validates: Requirement 5.5 + #[test] + fn bridge_command_accepts_any_existing_directory_as_cwd() { + // Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory + // Validates: Requirement 5.5 + + // Skip actual spawning when both node and runtime are present to avoid blocking. + if node_available() && runtime_available() { + // Structural guarantee: current_dir is set unconditionally in + // run_node_command_captured before cmd.output() is called. + // The property holds by construction. + return; + } + + let dirs: Vec = (0..5) + .map(|_| TempDir::new().expect("failed to create temp dir")) + .collect(); + + for tmp in &dirs { + let cwd = tmp.path(); + assert!(cwd.exists(), "temp dir must exist"); + + let result = run_bridge_command("execute", cwd, true, &[]); + + match result { + Ok(_) + | Err(CliError::NodeNotFound) + | Err(CliError::PluginRuntimeNotFound(_)) + | Err(CliError::NodeProcessFailed { .. }) => { + // All acceptable — cwd was forwarded correctly. + } + Err(ref err) if is_cwd_io_error(err) => { + panic!( + "run_bridge_command returned a cwd-related IoError for existing dir {:?}: {:?}", + cwd, err + ); + } + Err(_) => { + // Other errors are unrelated to cwd — acceptable. + } + } + } + } + + /// Negative test: passing a non-existent path should NOT silently succeed. + /// The error must be either NodeNotFound, PluginRuntimeNotFound, or an IoError + /// (because the OS rejects the non-existent cwd at spawn time). + /// + /// Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory + /// Validates: Requirement 5.5 + #[test] + fn bridge_command_with_nonexistent_cwd_returns_error_not_success() { + // Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory + // Validates: Requirement 5.5 + let nonexistent = std::path::Path::new("/this/path/does/not/exist/tnmsc_test_8_1"); + assert!(!nonexistent.exists(), "path must not exist for this test"); + + let result = run_bridge_command("execute", nonexistent, true, &[]); + + // Must NOT be Ok — a non-existent cwd should never produce a successful result. + assert!( + result.is_err(), + "run_bridge_command with non-existent cwd must return Err, got Ok" + ); + + // The error must be one of the expected variants — not a silent success. + match result { + Err(CliError::NodeNotFound) => { /* node not installed — acceptable */ } + Err(CliError::PluginRuntimeNotFound(_)) => { /* runtime absent — acceptable */ } + Err(CliError::IoError(_)) => { /* OS rejected the bad cwd — expected */ } + Err(CliError::NodeProcessFailed { .. }) => { /* process ran but failed — acceptable */ + } + Err(other) => { + // ConfigError / SerializationError are unexpected here but not a cwd bug. + // We allow them rather than over-constraining the test. + let _ = other; + } + Ok(_) => unreachable!("already asserted is_err above"), + } + } +} + +// --------------------------------------------------------------------------- +// Cargo workspace configuration validation tests +// --------------------------------------------------------------------------- +#[cfg(test)] +mod cargo_config_tests { + use std::fs; + + fn workspace_root() -> std::path::PathBuf { + let manifest_dir = env!("CARGO_MANIFEST_DIR"); + std::path::Path::new(manifest_dir) + .parent() + .expect("workspace root should exist") + .to_path_buf() + } + + /// Verify sdk/Cargo.toml exposes the tnmsc library target. + #[test] + fn sdk_cargo_toml_has_lib_target() { + let sdk_toml = workspace_root().join("sdk").join("Cargo.toml"); + let content = fs::read_to_string(&sdk_toml).expect("sdk/Cargo.toml should be readable"); + + assert!( + content.contains("[lib]"), + "sdk/Cargo.toml should contain [lib] section" + ); + } + + /// Verify the sdk library target keeps the tnmsc crate identity. + #[test] + fn sdk_cargo_toml_lib_crate_name_is_tnmsc() { + let sdk_toml = workspace_root().join("sdk").join("Cargo.toml"); + let content = fs::read_to_string(&sdk_toml).expect("sdk/Cargo.toml should be readable"); + + assert!( + content.contains("[package]\nname = \"tnmsc\"") + || content.contains("[package]\r\nname = \"tnmsc\""), + "sdk/Cargo.toml should keep package name = \"tnmsc\"" + ); + assert!( + content.contains("[lib]\nname = \"tnmsc\"") + || content.contains("[lib]\r\nname = \"tnmsc\""), + "sdk/Cargo.toml should keep lib name = \"tnmsc\"" + ); + } + + /// Verify gui/src-tauri/Cargo.toml contains tnmsc as a workspace dependency. + #[test] + fn gui_cargo_toml_has_tnmsc_workspace_dependency() { + let gui_toml = workspace_root() + .join("gui") + .join("src-tauri") + .join("Cargo.toml"); + let content = + fs::read_to_string(&gui_toml).expect("gui/src-tauri/Cargo.toml should be readable"); + + assert!( + content.contains("tnmsc = { workspace = true }"), + "gui/src-tauri/Cargo.toml should contain `tnmsc = {{ workspace = true }}`" + ); + } + + /// Verify root Cargo.toml declares tnmsc path dependency in [workspace.dependencies]. + #[test] + fn root_cargo_toml_has_tnmsc_workspace_path_dependency() { + let root_toml = workspace_root().join("Cargo.toml"); + let content = fs::read_to_string(&root_toml).expect("root Cargo.toml should be readable"); + + assert!( + content.contains(r#"tnmsc = { path = "sdk" }"#), + "root Cargo.toml [workspace.dependencies] should contain `tnmsc = {{ path = \"sdk\" }}`" + ); + } +} diff --git a/sdk/src/pipeline/CliArgumentParser.test.ts b/sdk/src/pipeline/CliArgumentParser.test.ts new file mode 100644 index 00000000..ad49ff88 --- /dev/null +++ b/sdk/src/pipeline/CliArgumentParser.test.ts @@ -0,0 +1,9 @@ +import {describe, expect, it} from 'vitest' +import {parseArgs, resolveCommand} from './CliArgumentParser' + +describe('cli argument parser', () => { + it('resolves the init subcommand to InitCommand', () => { + const command = resolveCommand(parseArgs(['init'])) + expect(command.name).toBe('init') + }) +}) diff --git a/sdk/src/pipeline/CliArgumentParser.ts b/sdk/src/pipeline/CliArgumentParser.ts new file mode 100644 index 00000000..ac5c1b60 --- /dev/null +++ b/sdk/src/pipeline/CliArgumentParser.ts @@ -0,0 +1,265 @@ +/** + * CLI Argument Parser Module + * Handles extraction and parsing of command-line arguments + * + * Refactored to use Command Factory pattern for command creation + */ + +import type {Command} from '@/commands/Command' +import {FactoryPriority} from '@/commands/CommandFactory' +import {CommandRegistry} from '@/commands/CommandRegistry' +import {CleanCommandFactory} from '@/commands/factories/CleanCommandFactory' +import {ConfigCommandFactory} from '@/commands/factories/ConfigCommandFactory' +import {DryRunCommandFactory} from '@/commands/factories/DryRunCommandFactory' +import {ExecuteCommandFactory} from '@/commands/factories/ExecuteCommandFactory' +import {HelpCommandFactory} from '@/commands/factories/HelpCommandFactory' +import {InitCommandFactory} from '@/commands/factories/InitCommandFactory' +import {PluginsCommandFactory} from '@/commands/factories/PluginsCommandFactory' +import {UnknownCommandFactory} from '@/commands/factories/UnknownCommandFactory' +import {VersionCommandFactory} from '@/commands/factories/VersionCommandFactory' + +/** + * Valid subcommands for the CLI + */ +export type Subcommand = 'help' | 'version' | 'init' | 'dry-run' | 'clean' | 'config' | 'plugins' + +/** + * Valid log levels for the CLI + */ +export type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' + +/** + * Command line argument parsing result + */ +export interface ParsedCliArgs { + readonly subcommand: Subcommand | undefined + readonly helpFlag: boolean + readonly versionFlag: boolean + readonly dryRun: boolean + readonly jsonFlag: boolean + readonly showFlag: boolean + readonly logLevel: LogLevel | undefined + readonly setOption: readonly [key: string, value: string][] + readonly unknownCommand: string | undefined + readonly positional: readonly string[] + readonly unknown: readonly string[] +} + +/** + * Valid subcommands set for quick lookup + */ +const VALID_SUBCOMMANDS: ReadonlySet = new Set(['help', 'version', 'init', 'dry-run', 'clean', 'config', 'plugins']) + +/** + * Log level flags mapping + */ +const LOG_LEVEL_FLAGS: ReadonlyMap = new Map([ + ['--trace', 'trace'], + ['--debug', 'debug'], + ['--info', 'info'], + ['--warn', 'warn'], + ['--error', 'error'] +]) + +/** + * Log level priority map (lower number = more verbose) + */ +const LOG_LEVEL_PRIORITY: ReadonlyMap = new Map([ + ['trace', 0], + ['debug', 1], + ['info', 2], + ['warn', 3], + ['error', 4] +]) + +/** + * Extract actual user arguments from argv + * Compatible with various execution scenarios: npx, node, tsx, direct execution, etc. + */ +export function extractUserArgs(argv: readonly string[]): string[] { + const args = [...argv] + + const first = args[0] // Skip runtime path (node, bun, deno, etc.) + if (first != null && isRuntimeExecutable(first)) args.shift() + + const second = args[0] // Skip script path or npx package name + if (second != null && isScriptOrPackage(second)) args.shift() + + return args +} + +/** + * Determine if it is a runtime executable + */ +function isRuntimeExecutable(arg: string): boolean { + const runtimes = ['node', 'nodejs', 'bun', 'deno', 'tsx', 'ts-node', 'npx', 'pnpx', 'yarn', 'pnpm'] + const normalized = arg.toLowerCase().replaceAll('\\', '/') + return runtimes.some(rt => { + const pattern = new RegExp(`(?:^|/)${rt}(?:\\.exe|\\.cmd|\\.ps1)?$`, 'i') + return pattern.test(normalized) || normalized === rt + }) +} + +/** + * Determine if it is a script file or package name + */ +function isScriptOrPackage(arg: string): boolean { + if (/\.(?:m?[jt]s|cjs)$/.test(arg)) return true // Script file + if (/[/\\]/.test(arg) && !arg.startsWith('-')) return true // File path containing separators + return /^(?:@[\w-]+\/)?[\w-]+$/.test(arg) && !arg.startsWith('-') // npx executed package name +} + +function pickMoreVerbose(current: LogLevel | undefined, candidate: LogLevel): LogLevel { + if (current == null) return candidate + const currentPriority = LOG_LEVEL_PRIORITY.get(current) ?? 4 + const candidatePriority = LOG_LEVEL_PRIORITY.get(candidate) ?? 4 + return candidatePriority < currentPriority ? candidate : current +} + +/** + * Parse command line arguments into structured result + */ +export function parseArgs(args: readonly string[]): ParsedCliArgs { + const result: { + subcommand: Subcommand | undefined + helpFlag: boolean + versionFlag: boolean + dryRun: boolean + jsonFlag: boolean + showFlag: boolean + logLevel: LogLevel | undefined + setOption: [key: string, value: string][] + unknownCommand: string | undefined + positional: string[] + unknown: string[] + } = { + subcommand: void 0, + helpFlag: false, + versionFlag: false, + dryRun: false, + jsonFlag: false, + showFlag: false, + logLevel: void 0, + setOption: [], + unknownCommand: void 0, + positional: [], + unknown: [] + } + + let firstPositionalProcessed = false + + for (let i = 0; i < args.length; i++) { + const arg = args[i] + if (arg == null) continue + + if (arg === '--') { // Handle -- separator: all following args are positional + result.positional.push(...args.slice(i + 1).filter((a): a is string => a != null)) + break + } + + if (arg.startsWith('--')) { // Long options + const parts = arg.split('=') + const key = parts[0] ?? '' + + const logLevel = LOG_LEVEL_FLAGS.get(key) // Check log level flags + if (logLevel != null) { + result.logLevel = pickMoreVerbose(result.logLevel, logLevel) + continue + } + + switch (key) { + case '--help': result.helpFlag = true; break + case '--version': result.versionFlag = true; break + case '--dry-run': result.dryRun = true; break + case '--json': result.jsonFlag = true; break + case '--show': result.showFlag = true; break + case '--set': + if (parts.length > 1) { // Parse --set key=value from next arg or from = syntax + const keyValue = parts.slice(1).join('=') + const eqIndex = keyValue.indexOf('=') + if (eqIndex > 0) result.setOption.push([keyValue.slice(0, eqIndex), keyValue.slice(eqIndex + 1)]) + } else { + const nextArg = args[i + 1] // Next arg is the value + if (nextArg != null) { + const eqIndex = nextArg.indexOf('=') + if (eqIndex > 0) { + result.setOption.push([nextArg.slice(0, eqIndex), nextArg.slice(eqIndex + 1)]) + i++ // Skip next arg + } + } + } + break + default: result.unknown.push(arg) + } + continue + } + + if (arg.startsWith('-') && arg.length > 1) { // Short options + const flags = arg.slice(1) + for (const flag of flags) { + switch (flag) { + case 'h': result.helpFlag = true; break + case 'v': result.versionFlag = true; break + case 'n': result.dryRun = true; break + case 'j': result.jsonFlag = true; break + default: result.unknown.push(`-${flag}`) + } + } + continue + } + + if (!firstPositionalProcessed) { // First positional argument: check if it's a subcommand + firstPositionalProcessed = true + if (VALID_SUBCOMMANDS.has(arg)) result.subcommand = arg as Subcommand + else { + result.unknownCommand = arg // Unknown first positional is captured as unknownCommand + } + continue + } + + result.positional.push(arg) // Remaining positional arguments + } + + return result +} + +/** + * Singleton instance of the command registry + * Lazy-loaded to ensure factories are only created when needed + */ +let commandRegistry: ReturnType | undefined + +function createDefaultCommandRegistry(): CommandRegistry { + const registry = new CommandRegistry() + + registry.register(new VersionCommandFactory()) // High priority: flag-based commands + registry.register(new HelpCommandFactory()) + registry.register(new UnknownCommandFactory()) + + registry.registerWithPriority(new InitCommandFactory(), FactoryPriority.Subcommand) + registry.registerWithPriority(new DryRunCommandFactory(), FactoryPriority.Subcommand) + registry.registerWithPriority(new CleanCommandFactory(), FactoryPriority.Subcommand) + registry.registerWithPriority(new PluginsCommandFactory(), FactoryPriority.Subcommand) + registry.registerWithPriority(new ConfigCommandFactory(), FactoryPriority.Subcommand) + + registry.registerWithPriority(new ExecuteCommandFactory(), FactoryPriority.Subcommand) // Lowest priority: default/catch-all command + + return registry +} + +/** + * Get or create the command registry singleton + */ +function getCommandRegistry(): ReturnType { + commandRegistry ??= createDefaultCommandRegistry() + return commandRegistry +} + +/** + * Resolve command from parsed CLI arguments using factory pattern + * Delegates command creation to registered factories based on priority + */ +export function resolveCommand(args: ParsedCliArgs): Command { + const registry = getCommandRegistry() + return registry.resolve(args) +} diff --git a/sdk/src/pipeline/ContextMerger.ts b/sdk/src/pipeline/ContextMerger.ts new file mode 100644 index 00000000..cf7dbd97 --- /dev/null +++ b/sdk/src/pipeline/ContextMerger.ts @@ -0,0 +1,207 @@ +/** + * Context Merger Module + * Handles merging of partial InputCollectedContext objects + */ + +import type {InputCollectedContext, Project, Workspace} from '../plugins/plugin-core' + +/** + * Merge strategy types for context fields + */ +type MergeStrategy = 'concat' | 'override' | 'mergeProjects' + +/** + * Field merge configuration + */ +interface FieldConfig { + readonly strategy: MergeStrategy + readonly getter: (ctx: Partial) => T | undefined +} + +/** + * Merge configuration for all InputCollectedContext fields + */ +const FIELD_CONFIGS: Record> = { + workspace: { + strategy: 'mergeProjects', + getter: ctx => ctx.workspace + }, + vscodeConfigFiles: { + strategy: 'concat', + getter: ctx => ctx.vscodeConfigFiles + }, + zedConfigFiles: { + strategy: 'concat', + getter: ctx => ctx.zedConfigFiles + }, + jetbrainsConfigFiles: { + strategy: 'concat', + getter: ctx => ctx.jetbrainsConfigFiles + }, + editorConfigFiles: { + strategy: 'concat', + getter: ctx => ctx.editorConfigFiles + }, + commands: { + strategy: 'concat', + getter: ctx => ctx.commands + }, + subAgents: { + strategy: 'concat', + getter: ctx => ctx.subAgents + }, + skills: { + strategy: 'concat', + getter: ctx => ctx.skills + }, + rules: { + strategy: 'concat', + getter: ctx => ctx.rules + }, + aiAgentIgnoreConfigFiles: { + strategy: 'concat', + getter: ctx => ctx.aiAgentIgnoreConfigFiles + }, + readmePrompts: { + strategy: 'concat', + getter: ctx => ctx.readmePrompts + }, + globalMemory: { + // Override fields (last one wins) + strategy: 'override', + getter: ctx => ctx.globalMemory + }, + aindexDir: { + strategy: 'override', + getter: ctx => ctx.aindexDir + }, + globalGitIgnore: { + strategy: 'override', + getter: ctx => ctx.globalGitIgnore + }, + shadowGitExclude: { + strategy: 'override', + getter: ctx => ctx.shadowGitExclude + } +} as const + +/** + * Merge two arrays by concatenating them + */ +function mergeArrays( + base: readonly T[] | undefined, + addition: readonly T[] | undefined +): readonly T[] { + if (addition == null) return base ?? [] + if (base == null) return addition + return [...base, ...addition] +} + +/** + * Merge workspace projects. Later projects with the same name replace earlier ones. + */ +function buildProjectMergeKey(project: Project): string { + if (project.isWorkspaceRootProject === true) return `workspace-root:${project.name ?? ''}` + + const promptSeries = project.promptSeries ?? 'workspace' + return `${promptSeries}:${project.name ?? ''}` +} + +function mergeWorkspaceProjects( + base: Workspace, + addition: Workspace +): Workspace { + const projectMap = new Map() + for (const project of base.projects) projectMap.set(buildProjectMergeKey(project), project) + for (const project of addition.projects) + { projectMap.set(buildProjectMergeKey(project), project) } + return { + directory: addition.directory ?? base.directory, + projects: [...projectMap.values()] + } +} + +/** + * Merge workspace fields + */ +function mergeWorkspace( + base: Workspace | undefined, + addition: Workspace | undefined +): Workspace | undefined { + if (addition == null) return base + if (base == null) return addition + return mergeWorkspaceProjects(base, addition) +} + +/** + * Merge a single field based on its strategy + */ +function mergeField( + base: T | undefined, + addition: T | undefined, + strategy: MergeStrategy +): T | undefined { + switch (strategy) { + case 'concat': + return mergeArrays( + base as unknown[], + addition as unknown[] + ) as unknown as T + case 'override': + return addition ?? base + case 'mergeProjects': + return mergeWorkspace( + base as unknown as Workspace, + addition as unknown as Workspace + ) as unknown as T + default: + return addition ?? base + } +} + +/** + * Merge two partial InputCollectedContext objects + * Uses configuration-driven approach to reduce code duplication + */ +export function mergeContexts( + base: Partial, + addition: Partial +): Partial { + const result: Record = {} + + for (const [fieldName, config] of Object.entries(FIELD_CONFIGS)) { + // Process each configured field + const baseValue = config.getter(base) + const additionValue = config.getter(addition) + const mergedValue = mergeField(baseValue, additionValue, config.strategy) + if (mergedValue != null) result[fieldName] = mergedValue + } + + return result as Partial +} + +/** + * Build dependency context from plugin outputs + */ +export function buildDependencyContext( + plugin: {dependsOn?: readonly string[]}, + outputsByPlugin: Map>, + mergeFn: ( + base: Partial, + addition: Partial + ) => Partial +): Partial { + const deps = plugin.dependsOn ?? [] + if (deps.length === 0) return {} + + const visited = new Set() + let merged: Partial = {} + for (const depName of deps) { + if (visited.has(depName)) continue + visited.add(depName) + const depOutput = outputsByPlugin.get(depName) + if (depOutput != null) merged = mergeFn(merged, depOutput) + } + + return merged +} diff --git a/sdk/src/pipeline/DependencyResolver.ts b/sdk/src/pipeline/DependencyResolver.ts new file mode 100644 index 00000000..36a5ef38 --- /dev/null +++ b/sdk/src/pipeline/DependencyResolver.ts @@ -0,0 +1,136 @@ +/** + * Dependency Resolver Module + * Handles dependency graph building, validation, and topological sorting + */ + +import type {DependencyNode} from '../plugins/plugin-core' +import {CircularDependencyError, MissingDependencyError} from '../plugins/plugin-core' + +/** + * Find cycle path in dependency graph for error reporting + */ +function findCyclePath( + nodes: readonly T[], + inDegree: Map +): string[] { + const cycleNodes = new Set() // Find nodes that are part of a cycle (in-degree > 0) + for (const [name, degree] of inDegree) { + if (degree > 0) cycleNodes.add(name) + } + + const deps = new Map() // Build dependency map for cycle nodes + for (const node of nodes) { + if (cycleNodes.has(node.name)) { + const nodeDeps = (node.dependsOn ?? []).filter(d => cycleNodes.has(d)) + deps.set(node.name, nodeDeps) + } + } + + const visited = new Set() // DFS to find cycle path + const path: string[] = [] + + const dfs = (node: string): boolean => { + if (path.includes(node)) { + path.push(node) // Found cycle, add closing node to complete the cycle + return true + } + if (visited.has(node)) return false + + visited.add(node) + path.push(node) + + for (const dep of deps.get(node) ?? []) { + if (dfs(dep)) return true + } + + path.pop() + return false + } + + for (const node of cycleNodes) { // Start DFS from any cycle node + if (dfs(node)) { + const lastNode = path.at(-1) + if (lastNode == null) return [...cycleNodes] + const cycleStart = path.indexOf(lastNode) // Extract just the cycle portion + return path.slice(cycleStart) + } + visited.clear() + path.length = 0 + } + + return [...cycleNodes] // Fallback: return all cycle nodes +} + +/** + * Topologically sort dependency nodes based on dependencies. + * Uses Kahn's algorithm with registration order preservation. + */ +export function topologicalSort( + nodes: readonly T[] +): T[] { + const nodeNames = new Set(nodes.map(node => node.name)) // Validate dependencies first + for (const node of nodes) { + const deps = node.dependsOn ?? [] + for (const dep of deps) { + if (!nodeNames.has(dep)) throw new MissingDependencyError(node.name, dep) + } + } + + const nodeMap = new Map() // Build node map for quick lookup + for (const node of nodes) nodeMap.set(node.name, node) + + const inDegree = new Map() // Build in-degree map (count of incoming edges) + for (const node of nodes) inDegree.set(node.name, 0) + + const dependents = new Map() // Build adjacency list (dependents for each node) + for (const node of nodes) dependents.set(node.name, []) + + for (const node of nodes) { // Populate in-degree and dependents + const deps = node.dependsOn ?? [] + for (const dep of deps) { + inDegree.set(node.name, (inDegree.get(node.name) ?? 0) + 1) // Increment in-degree for current node + const depList = dependents.get(dep) ?? [] // Add current node as dependent of dep + depList.push(node.name) + dependents.set(dep, depList) + } + } + + const queue: string[] = [] // Use registration order for initial queue // Initialize queue with nodes that have no dependencies (in-degree = 0) + for (const node of nodes) { + if (inDegree.get(node.name) === 0) queue.push(node.name) + } + + const result: T[] = [] // Process queue + const nodeIndexMap = new Map() // Pre-compute node indices for O(1) lookup - fixes O(n²) complexity + for (let i = 0; i < nodes.length; i++) { + const node = nodes[i] + if (node != null) nodeIndexMap.set(node.name, i) + } + + while (queue.length > 0) { + const current = queue.shift() // Take first element to preserve registration order + if (current == null) continue + + const node = nodeMap.get(current) + if (node == null) continue + result.push(node) + + const currentDependents = dependents.get(current) ?? [] // Process dependents in registration order + const sortedDependents = currentDependents.sort((a, b) => { // Sort dependents by their original registration order + const indexA = nodeIndexMap.get(a) ?? -1 + const indexB = nodeIndexMap.get(b) ?? -1 + return indexA - indexB + }) + + for (const dependent of sortedDependents) { + const newDegree = (inDegree.get(dependent) ?? 0) - 1 + inDegree.set(dependent, newDegree) + if (newDegree === 0) queue.push(dependent) + } + } + + if (result.length === nodes.length) return result // Check for cycle: if not all nodes are in result, there's a cycle + + const cyclePath = findCyclePath(nodes, inDegree) + throw new CircularDependencyError(cyclePath) +} diff --git a/sdk/src/pipeline/OutputRuntimeTargets.ts b/sdk/src/pipeline/OutputRuntimeTargets.ts new file mode 100644 index 00000000..0f9aa71b --- /dev/null +++ b/sdk/src/pipeline/OutputRuntimeTargets.ts @@ -0,0 +1,57 @@ +import type {ILogger, OutputRuntimeTargets} from '@/plugins/plugin-core' + +import * as fs from 'node:fs' +import * as path from 'node:path' +import {getPlatformFixedDir} from '@/core/desk-paths' +import {buildFileOperationDiagnostic} from '@/diagnostics' + +const JETBRAINS_VENDOR_DIR = 'JetBrains' +const JETBRAINS_AIA_DIR = 'aia' +const JETBRAINS_CODEX_DIR = 'codex' +const SUPPORTED_JETBRAINS_IDE_DIR_PREFIXES = [ + 'IntelliJIdea', + 'WebStorm', + 'RustRover', + 'PyCharm', + 'PyCharmCE', + 'PhpStorm', + 'GoLand', + 'CLion', + 'DataGrip', + 'RubyMine', + 'Rider', + 'DataSpell', + 'Aqua' +] as const + +function isSupportedJetBrainsIdeDir(dirName: string): boolean { + return SUPPORTED_JETBRAINS_IDE_DIR_PREFIXES.some(prefix => dirName.startsWith(prefix)) +} + +function discoverJetBrainsCodexDirs(logger: ILogger): readonly string[] { + const baseDir = path.join(getPlatformFixedDir(), JETBRAINS_VENDOR_DIR) + + try { + const dirents = fs.readdirSync(baseDir, {withFileTypes: true}) + return dirents + .filter(dirent => dirent.isDirectory() && isSupportedJetBrainsIdeDir(dirent.name)) + .map(dirent => path.join(baseDir, dirent.name, JETBRAINS_AIA_DIR, JETBRAINS_CODEX_DIR)) + } + catch (error) { + logger.debug(buildFileOperationDiagnostic({ + code: 'JETBRAINS_CODEX_DIRECTORY_SCAN_SKIPPED', + title: 'JetBrains Codex directories are unavailable', + operation: 'scan', + targetKind: 'JetBrains IDE directory', + path: baseDir, + error + })) + return [] + } +} + +export function discoverOutputRuntimeTargets(logger: ILogger): OutputRuntimeTargets { + return { + jetbrainsCodexDirs: discoverJetBrainsCodexDirs(logger) + } +} diff --git a/sdk/src/plugin-runtime.ts b/sdk/src/plugin-runtime.ts new file mode 100644 index 00000000..c23b0cf8 --- /dev/null +++ b/sdk/src/plugin-runtime.ts @@ -0,0 +1,128 @@ +import type {OutputCleanContext, OutputWriteContext} from './plugins/plugin-core' +/** + * Plugin Runtime Entry Point + * + * Streamlined entry for the Rust CLI binary to spawn via Node.js. + * Accepts a subcommand and flags, executes the plugin pipeline, + * and outputs results to stdout. + * + * Usage: node plugin-runtime.mjs [--json] [--dry-run] + * + * Subcommands: execute, dry-run, clean, plugins + */ +import type {Command, CommandContext} from '@/commands/Command' +import type {PipelineConfig} from '@/config' +import process from 'node:process' +import {CleanCommand} from '@/commands/CleanCommand' +import {DryRunCleanCommand} from '@/commands/DryRunCleanCommand' +import {DryRunOutputCommand} from '@/commands/DryRunOutputCommand' +import {ExecuteCommand} from '@/commands/ExecuteCommand' +import {JsonOutputCommand, toJsonCommandResult} from '@/commands/JsonOutputCommand' +import {PluginsCommand} from '@/commands/PluginsCommand' +import {buildUnhandledExceptionDiagnostic} from '@/diagnostics' +import {discoverOutputRuntimeTargets} from '@/pipeline/OutputRuntimeTargets' +import {createDefaultPluginConfig} from './plugin.config' +import {createLogger, drainBufferedDiagnostics, setGlobalLogLevel} from './plugins/plugin-core' + +/** + * Parse runtime arguments. + * Expected: node plugin-runtime.mjs [--json] [--dry-run] + */ +function parseRuntimeArgs(argv: string[]): {subcommand: string, json: boolean, dryRun: boolean} { + const args = argv.slice(2) // Skip node and script path + let subcommand = 'execute' + let json = false + let dryRun = false + + for (const arg of args) { + if (arg === '--json' || arg === '-j') json = true + else if (arg === '--dry-run' || arg === '-n') dryRun = true + else if (!arg.startsWith('-')) subcommand = arg + } + + return {subcommand, json, dryRun} +} + +/** + * Resolve command from subcommand string. + */ +function resolveRuntimeCommand(subcommand: string, dryRun: boolean): Command { + switch (subcommand) { + case 'execute': return new ExecuteCommand() + case 'dry-run': return new DryRunOutputCommand() + case 'clean': return dryRun ? new DryRunCleanCommand() : new CleanCommand() + case 'plugins': return new PluginsCommand() + default: return new ExecuteCommand() + } +} + +async function main(): Promise { + const {subcommand, json, dryRun} = parseRuntimeArgs(process.argv) + + if (json) setGlobalLogLevel('silent') + + const userPluginConfig: PipelineConfig = await createDefaultPluginConfig(process.argv) + + let command = resolveRuntimeCommand(subcommand, dryRun) + + if (json) { + const selfJsonCommands = new Set(['plugins']) + if (!selfJsonCommands.has(command.name)) command = new JsonOutputCommand(command) + } + + const {context, outputPlugins, userConfigOptions} = userPluginConfig + const logger = createLogger('PluginRuntime') + const runtimeTargets = discoverOutputRuntimeTargets(logger) + + const createCleanContext = (dry: boolean): OutputCleanContext => ({ + logger, + collectedOutputContext: context, + pluginOptions: userConfigOptions, + runtimeTargets, + dryRun: dry + }) + + const createWriteContext = (dry: boolean): OutputWriteContext => ({ + logger, + collectedOutputContext: context, + pluginOptions: userConfigOptions, + runtimeTargets, + dryRun: dry, + registeredPluginNames: Array.from(outputPlugins, plugin => plugin.name) + }) + + const commandCtx: CommandContext = { + logger, + outputPlugins: [...outputPlugins], + collectedOutputContext: context, + userConfigOptions, + createCleanContext, + createWriteContext + } + + const result = await command.execute(commandCtx) + if (!result.success) process.exit(1) +} + +function writeJsonFailure(error: unknown): void { + const errorMessage = error instanceof Error ? error.message : String(error) + const logger = createLogger('plugin-runtime', 'silent') + logger.error(buildUnhandledExceptionDiagnostic('plugin-runtime', error)) + process.stdout.write(`${JSON.stringify(toJsonCommandResult({ + success: false, + filesAffected: 0, + dirsAffected: 0, + message: errorMessage + }, drainBufferedDiagnostics()))}\n`) +} + +main().catch((e: unknown) => { + const {json} = parseRuntimeArgs(process.argv) + if (json) { + writeJsonFailure(e) + process.exit(1) + } + const logger = createLogger('plugin-runtime', 'error') + logger.error(buildUnhandledExceptionDiagnostic('plugin-runtime', e)) + process.exit(1) +}) diff --git a/sdk/src/plugin.config.ts b/sdk/src/plugin.config.ts new file mode 100644 index 00000000..8d0dd887 --- /dev/null +++ b/sdk/src/plugin.config.ts @@ -0,0 +1,58 @@ +import type {PipelineConfig} from '@/config' +import process from 'node:process' +import {GenericSkillsOutputPlugin} from '@truenine/plugin-agentskills-compact' +import {AgentsOutputPlugin} from '@truenine/plugin-agentsmd' +import {ClaudeCodeCLIOutputPlugin} from '@truenine/plugin-claude-code-cli' +import {CursorOutputPlugin} from '@truenine/plugin-cursor' +import {DroidCLIOutputPlugin} from '@truenine/plugin-droid-cli' +import {EditorConfigOutputPlugin} from '@truenine/plugin-editorconfig' +import {GeminiCLIOutputPlugin} from '@truenine/plugin-gemini-cli' +import {GitExcludeOutputPlugin} from '@truenine/plugin-git-exclude' +import {JetBrainsAIAssistantCodexOutputPlugin} from '@truenine/plugin-jetbrains-ai-codex' +import {JetBrainsIDECodeStyleConfigOutputPlugin} from '@truenine/plugin-jetbrains-codestyle' +import {CodexCLIOutputPlugin} from '@truenine/plugin-openai-codex-cli' +import {OpencodeCLIOutputPlugin} from '@truenine/plugin-opencode-cli' +import {QoderIDEPluginOutputPlugin} from '@truenine/plugin-qoder-ide' +import {ReadmeMdConfigFileOutputPlugin} from '@truenine/plugin-readme' +import {TraeIDEOutputPlugin} from '@truenine/plugin-trae-ide' +import {VisualStudioCodeIDEConfigOutputPlugin} from '@truenine/plugin-vscode' +import {WarpIDEOutputPlugin} from '@truenine/plugin-warp-ide' +import {WindsurfOutputPlugin} from '@truenine/plugin-windsurf' +import {ZedIDEConfigOutputPlugin} from '@truenine/plugin-zed' +import {defineConfig} from '@/config' +import {TraeCNIDEOutputPlugin} from '@/plugins/plugin-trae-cn-ide' + +export async function createDefaultPluginConfig( + pipelineArgs: readonly string[] = process.argv +): Promise { + return defineConfig({ + pipelineArgs, + pluginOptions: { + plugins: [ + new AgentsOutputPlugin(), + new ClaudeCodeCLIOutputPlugin(), + new CodexCLIOutputPlugin(), + new JetBrainsAIAssistantCodexOutputPlugin(), + new DroidCLIOutputPlugin(), + new GeminiCLIOutputPlugin(), + new GenericSkillsOutputPlugin(), + new OpencodeCLIOutputPlugin(), + new QoderIDEPluginOutputPlugin(), + new TraeIDEOutputPlugin(), + new TraeCNIDEOutputPlugin(), + new WarpIDEOutputPlugin(), + new WindsurfOutputPlugin(), + new CursorOutputPlugin(), + new GitExcludeOutputPlugin(), + + new JetBrainsIDECodeStyleConfigOutputPlugin(), + new EditorConfigOutputPlugin(), + new VisualStudioCodeIDEConfigOutputPlugin(), + new ZedIDEConfigOutputPlugin(), + new ReadmeMdConfigFileOutputPlugin() + ] + } + }) +} + +export default createDefaultPluginConfig diff --git a/sdk/src/plugins/AbstractOutputPlugin.test.ts b/sdk/src/plugins/AbstractOutputPlugin.test.ts new file mode 100644 index 00000000..8c6174c8 --- /dev/null +++ b/sdk/src/plugins/AbstractOutputPlugin.test.ts @@ -0,0 +1,122 @@ +import type {OutputWriteContext, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {AbstractOutputPlugin, createLogger, FilePathKind, PromptKind} from './plugin-core' + +class TestDefaultPromptOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('TestDefaultPromptOutputPlugin', { + outputFileName: 'TEST.md', + treatWorkspaceRootProjectAsProject: true + }) + } +} + +function createRootPrompt(content: string): ProjectRootMemoryPrompt { + return { + type: PromptKind.ProjectRootMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + }, + markdownContents: [] + } as ProjectRootMemoryPrompt +} + +function createChildPrompt( + workspaceBase: string, + projectName: string, + relativePath: string, + content: string +): ProjectChildrenMemoryPrompt { + return { + type: PromptKind.ProjectChildrenMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + markdownContents: [], + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceBase, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceBase, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) + } + } as ProjectChildrenMemoryPrompt +} + +describe('abstractOutputPlugin prompt-source project exclusion', () => { + it('skips prompt-source projects and still writes synthetic workspace root prompts through the default builder', async () => { + const plugin = new TestDefaultPromptOutputPlugin() + const workspaceBase = path.resolve('tmp/abstract-output-plugin') + const ctx = { + logger: createLogger('TestDefaultPromptOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + { + name: '__workspace__', + isWorkspaceRootProject: true, + rootMemoryPrompt: createRootPrompt('workspace root') + }, + { + name: 'aindex', + isPromptSourceProject: true, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'aindex', + basePath: workspaceBase, + getDirectoryName: () => 'aindex', + getAbsolutePath: () => path.join(workspaceBase, 'aindex') + }, + rootMemoryPrompt: createRootPrompt('prompt-source root'), + childMemoryPrompts: [createChildPrompt(workspaceBase, 'aindex', 'commands', 'prompt-source child')] + }, + { + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceBase, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceBase, 'project-a') + }, + rootMemoryPrompt: createRootPrompt('project root'), + childMemoryPrompts: [createChildPrompt(workspaceBase, 'project-a', 'commands', 'project child')] + } + ] + } + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(workspaceBase, 'TEST.md')) + expect(paths).toContain(path.join(workspaceBase, 'project-a', 'TEST.md')) + expect(paths).toContain(path.join(workspaceBase, 'project-a', 'commands', 'TEST.md')) + expect(paths).not.toContain(path.join(workspaceBase, 'aindex', 'TEST.md')) + expect(paths).not.toContain(path.join(workspaceBase, 'aindex', 'commands', 'TEST.md')) + }) +}) diff --git a/sdk/src/plugins/AgentsOutputPlugin.test.ts b/sdk/src/plugins/AgentsOutputPlugin.test.ts new file mode 100644 index 00000000..b9d9ffab --- /dev/null +++ b/sdk/src/plugins/AgentsOutputPlugin.test.ts @@ -0,0 +1,124 @@ +import type {OutputWriteContext, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {AgentsOutputPlugin} from './AgentsOutputPlugin' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' + +function createRootPrompt(content: string): ProjectRootMemoryPrompt { + return { + type: PromptKind.ProjectRootMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + }, + markdownContents: [] + } as ProjectRootMemoryPrompt +} + +function createChildPrompt( + workspaceBase: string, + projectName: string, + relativePath: string, + content: string +): ProjectChildrenMemoryPrompt { + return { + type: PromptKind.ProjectChildrenMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + markdownContents: [], + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceBase, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceBase, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) + } + } as ProjectChildrenMemoryPrompt +} + +describe('agentsOutputPlugin prompt-source project exclusion', () => { + it('skips prompt-source project files and still writes the synthetic workspace root prompt', async () => { + const plugin = new AgentsOutputPlugin() + const workspaceBase = path.resolve('tmp/agents-plugin') + const ctx = { + logger: createLogger('AgentsOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + { + name: '__workspace__', + isWorkspaceRootProject: true, + rootMemoryPrompt: createRootPrompt('workspace root') + }, + { + name: 'aindex', + isPromptSourceProject: true, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'aindex', + basePath: workspaceBase, + getDirectoryName: () => 'aindex', + getAbsolutePath: () => path.join(workspaceBase, 'aindex') + }, + rootMemoryPrompt: createRootPrompt('prompt-source root'), + childMemoryPrompts: [createChildPrompt(workspaceBase, 'aindex', 'commands', 'prompt-source child')] + }, + { + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceBase, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceBase, 'project-a') + }, + rootMemoryPrompt: createRootPrompt('project root'), + childMemoryPrompts: [createChildPrompt(workspaceBase, 'project-a', 'commands', 'project child')] + } + ] + } + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const paths = declarations.map(declaration => declaration.path) + const workspaceDeclaration = declarations.find(declaration => declaration.path === path.join(workspaceBase, 'AGENTS.md')) + const rootDeclaration = declarations.find(declaration => declaration.path === path.join(workspaceBase, 'project-a', 'AGENTS.md')) + const childDeclaration = declarations.find(declaration => declaration.path === path.join(workspaceBase, 'project-a', 'commands', 'AGENTS.md')) + + expect(paths).toContain(path.join(workspaceBase, 'AGENTS.md')) + expect(paths).toContain(path.join(workspaceBase, 'project-a', 'AGENTS.md')) + expect(paths).toContain(path.join(workspaceBase, 'project-a', 'commands', 'AGENTS.md')) + expect(paths).not.toContain(path.join(workspaceBase, 'aindex', 'AGENTS.md')) + expect(paths).not.toContain(path.join(workspaceBase, 'aindex', 'commands', 'AGENTS.md')) + if (workspaceDeclaration == null || rootDeclaration == null || childDeclaration == null) { + throw new Error('Expected AGENTS.md declarations were not emitted') + } + + await expect(plugin.convertContent(workspaceDeclaration, ctx)).resolves.toBe('workspace root') + await expect(plugin.convertContent(rootDeclaration, ctx)).resolves.toBe('project root') + await expect(plugin.convertContent(childDeclaration, ctx)).resolves.toBe('project child') + }) +}) diff --git a/sdk/src/plugins/AgentsOutputPlugin.ts b/sdk/src/plugins/AgentsOutputPlugin.ts new file mode 100644 index 00000000..1ae9ab13 --- /dev/null +++ b/sdk/src/plugins/AgentsOutputPlugin.ts @@ -0,0 +1,127 @@ +import type { + OutputCleanContext, + OutputCleanupDeclarations, + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'AGENTS.md' + +export class AgentsOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('AgentsOutputPlugin', { + outputFileName: PROJECT_MEMORY_FILE, + treatWorkspaceRootProjectAsProject: true, + capabilities: { + prompt: { + scopes: ['project'], + singleScope: false + } + } + }) + } + + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { + const declarations = await super.declareCleanupPaths(ctx) + const promptSourceProjects + = ctx.collectedOutputContext.workspace.projects.filter( + project => project.isPromptSourceProject === true + ) + const promptSourceExcludeGlobs = promptSourceProjects + .map(project => project.dirFromWorkspacePath) + .filter((dir): dir is NonNullable => dir != null) + .map(dir => this.resolvePath(dir.basePath, dir.path, '**')) + + return { + ...declarations, + delete: [ + ...declarations.delete ?? [], + ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) + ], + excludeScanGlobs: [ + ...declarations.excludeScanGlobs ?? [], + ...promptSourceExcludeGlobs + ] + } + } + + override async declareOutputFiles( + ctx: OutputWriteContext + ): Promise { + const results: OutputFileDeclaration[] = [] + const promptProjects = this.getProjectPromptOutputProjects(ctx) + const activePromptScopes = new Set( + this.selectPromptScopes(ctx, ['project']) + ) + if (!activePromptScopes.has('project')) return results + + for (const [projectIndex, project] of promptProjects.entries()) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (project.rootMemoryPrompt != null && projectRootDir != null) { + results.push({ + path: this.resolvePath(projectRootDir, PROJECT_MEMORY_FILE), + scope: 'project', + source: {type: 'projectRootMemory', projectIndex} + }) + } + + if (project.childMemoryPrompts != null) { + for (const [ + childIndex, + child + ] of project.childMemoryPrompts.entries()) { + results.push({ + path: this.resolveFullPath(child.dir), + scope: 'project', + source: {type: 'projectChildMemory', projectIndex, childIndex} + }) + } + } + } + + return results + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + const projects = this.getProjectPromptOutputProjects(ctx) + const source = declaration.source as { + type?: string + projectIndex?: number + childIndex?: number + } + + const projectIndex = source.projectIndex ?? -1 + if (projectIndex < 0 || projectIndex >= projects.length) + { throw new Error(`Invalid project index in declaration for ${this.name}`) } + + const project = projects[projectIndex] + if (project == null) + { throw new Error(`Project not found for declaration in ${this.name}`) } + + if (source.type === 'projectRootMemory') { + if (project.rootMemoryPrompt == null) + { throw new Error( + `Root memory prompt missing for project index ${projectIndex}` + ) } + return project.rootMemoryPrompt.content as string + } + + if (source.type === 'projectChildMemory') { + const childIndex = source.childIndex ?? -1 + const child = project.childMemoryPrompts?.[childIndex] + if (child == null) + { throw new Error( + `Child memory prompt missing for project ${projectIndex}, child ${childIndex}` + ) } + return child.content as string + } + + throw new Error(`Unsupported declaration source for ${this.name}`) + } +} diff --git a/sdk/src/plugins/ClaudeCodeCLIOutputPlugin.ts b/sdk/src/plugins/ClaudeCodeCLIOutputPlugin.ts new file mode 100644 index 00000000..cb4fa6f9 --- /dev/null +++ b/sdk/src/plugins/ClaudeCodeCLIOutputPlugin.ts @@ -0,0 +1,123 @@ +import type { + OutputCleanContext, + OutputCleanupDeclarations, + RulePrompt +} from './plugin-core' +import {doubleQuoted} from '@truenine/md-compiler/markdown' +import {AbstractOutputPlugin} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'CLAUDE.md' +const GLOBAL_CONFIG_DIR = '.claude' +const COMMANDS_SUBDIR = 'commands' +const AGENTS_SUBDIR = 'agents' +const SKILLS_SUBDIR = 'skills' + +/** + * Output plugin for Claude Code CLI. + * + * Outputs rules to `.claude/rules/` directory with frontmatter format. + * + * @see https://github.com/anthropics/claude-code/issues/26868 + * Known bug: Claude Code CLI has issues with `.claude/rules` directory handling. + * This may affect rule loading behavior in certain scenarios. + */ +export class ClaudeCodeCLIOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('ClaudeCodeCLIOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: PROJECT_MEMORY_FILE, + treatWorkspaceRootProjectAsProject: true, + toolPreset: 'claudeCode', + commands: { + subDir: COMMANDS_SUBDIR, + transformFrontMatter: (_cmd, context) => + context.sourceFrontMatter ?? {} + }, + subagents: { + subDir: AGENTS_SUBDIR, + sourceScopes: ['project'], + includePrefix: true, + linkSymbol: '-', + ext: '.md' + }, + skills: { + subDir: SKILLS_SUBDIR + }, + rules: { + transformFrontMatter: (rule: RulePrompt) => ({ + paths: rule.globs.map(doubleQuoted) + }) + }, + cleanup: { + delete: { + project: { + dirs: [ + '.claude/rules', + '.claude/commands', + '.claude/agents', + '.claude/skills' + ] + }, + global: { + files: ['.claude/CLAUDE.md'], + dirs: [ + '.claude/rules', + '.claude/commands', + '.claude/agents', + '.claude/skills' + ] + } + } + }, + wslMirrors: ['~/.claude/settings.json', '~/.claude/config.json'], + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + rules: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'global'], + singleScope: true + }, + subagents: { + scopes: ['project'], + singleScope: true + }, + skills: { + scopes: ['project', 'global'], + singleScope: true + } + } + }) + } + + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { + const declarations = await super.declareCleanupPaths(ctx) + const promptSourceProjects + = ctx.collectedOutputContext.workspace.projects.filter( + project => project.isPromptSourceProject === true + ) + const promptSourceExcludeGlobs = promptSourceProjects + .map(project => project.dirFromWorkspacePath) + .filter((dir): dir is NonNullable => dir != null) + .map(dir => this.resolvePath(dir.basePath, dir.path, '**')) + + return { + ...declarations, + delete: [ + ...declarations.delete ?? [], + ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) + ], + excludeScanGlobs: [ + ...declarations.excludeScanGlobs ?? [], + ...promptSourceExcludeGlobs + ] + } + } +} diff --git a/sdk/src/plugins/CodexCLIOutputPlugin.test.ts b/sdk/src/plugins/CodexCLIOutputPlugin.test.ts new file mode 100644 index 00000000..ff516f92 --- /dev/null +++ b/sdk/src/plugins/CodexCLIOutputPlugin.test.ts @@ -0,0 +1,364 @@ +import type {CommandPrompt, InputCapabilityContext, OutputCleanContext, OutputWriteContext, SubAgentPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {collectDeletionTargets} from '../commands/CleanupUtils' +import {mergeConfig} from '../config' +import {CommandInputCapability} from '../inputs/input-command' +import {CodexCLIOutputPlugin} from './CodexCLIOutputPlugin' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' + +class TestCodexCLIOutputPlugin extends CodexCLIOutputPlugin { + constructor(private readonly testHomeDir: string) { + super() + } + + protected override getHomeDir(): string { + return this.testHomeDir + } +} + +async function withTempCodexDirs( + prefix: string, + run: (paths: {readonly workspace: string, readonly homeDir: string}) => Promise +): Promise { + const workspace = fs.mkdtempSync(path.join(os.tmpdir(), `${prefix}-workspace-`)) + const homeDir = fs.mkdtempSync(path.join(os.tmpdir(), `${prefix}-home-`)) + + try { + await run({workspace, homeDir}) + } + finally { + fs.rmSync(workspace, {recursive: true, force: true}) + fs.rmSync(homeDir, {recursive: true, force: true}) + } +} + +function createInputContext(tempWorkspace: string): InputCapabilityContext { + return { + logger: createLogger('CodexCLIOutputPluginTest', 'error'), + fs, + path, + glob, + userConfigOptions: mergeConfig({workspaceDir: tempWorkspace}), + dependencyContext: {} + } as InputCapabilityContext +} + +function createCleanContext(): OutputCleanContext { + return { + logger: { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + }, + fs, + path, + glob, + dryRun: true, + runtimeTargets: { + jetbrainsCodexDirs: [] + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Relative, + path: '.', + basePath: '.', + getDirectoryName: () => '.', + getAbsolutePath: () => path.resolve('.') + }, + projects: [] + } + } + } as OutputCleanContext +} + +function createWriteContext( + tempWorkspace: string, + commands: readonly CommandPrompt[], + subAgents: readonly SubAgentPrompt[] = [], + pluginOptions?: OutputWriteContext['pluginOptions'] +): OutputWriteContext { + return { + logger: createLogger('CodexCLIOutputPluginTest', 'error'), + fs, + path, + glob, + dryRun: true, + ...pluginOptions != null && {pluginOptions}, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: tempWorkspace, + getDirectoryName: () => path.basename(tempWorkspace) + }, + projects: [{ + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: tempWorkspace, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(tempWorkspace, 'project-a') + }, + isPromptSourceProject: true + }, { + name: 'project-b', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-b', + basePath: tempWorkspace, + getDirectoryName: () => 'project-b', + getAbsolutePath: () => path.join(tempWorkspace, 'project-b') + } + }] + }, + commands, + subAgents + } + } as OutputWriteContext +} + +function createProjectCommandPrompt(): CommandPrompt { + return { + type: PromptKind.Command, + content: 'project command body', + length: 22, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'commands/dev/build.mdx', + basePath: path.resolve('tmp/dist/commands'), + getDirectoryName: () => 'dev', + getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') + }, + commandPrefix: 'dev', + commandName: 'build', + yamlFrontMatter: { + description: 'Project command', + scope: 'project' + }, + markdownContents: [] + } as CommandPrompt +} + +function createCommandPromptWithToolFields(): CommandPrompt { + return { + ...createProjectCommandPrompt(), + yamlFrontMatter: { + description: 'Tool-aware command', + scope: 'project', + allowTools: ['shell'], + allowedTools: ['shell'] + } as unknown as CommandPrompt['yamlFrontMatter'] + } as CommandPrompt +} + +function createSubAgentPrompt(scope: 'project' | 'global'): SubAgentPrompt { + return { + type: PromptKind.SubAgent, + content: 'Review changes carefully.\nFocus on concrete regressions.', + length: 55, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'subagents/qa/reviewer.mdx', + basePath: path.resolve('tmp/dist/subagents'), + getDirectoryName: () => 'qa', + getAbsolutePath: () => path.resolve('tmp/dist/subagents/qa/reviewer.mdx') + }, + agentPrefix: 'qa', + agentName: 'reviewer', + canonicalName: 'qa-reviewer', + yamlFrontMatter: { + description: 'Review pull requests', + scope, + model: 'gpt-5.2', + allowTools: ['shell'], + color: 'blue', + nickname_candidates: ['guard'], + sandbox_mode: 'workspace-write', + mcp_servers: { + docs: { + command: 'node', + args: ['mcp.js'] + } + } + } as unknown as SubAgentPrompt['yamlFrontMatter'], + markdownContents: [] + } as SubAgentPrompt +} + +describe('codexCLIOutputPlugin command output', () => { + it('renders codex commands from dist content instead of the zh source prompt', async () => { + await withTempCodexDirs('tnmsc-codex-command', async ({workspace, homeDir}) => { + const srcDir = path.join(workspace, 'aindex', 'commands', 'find') + const distDir = path.join(workspace, 'aindex', 'dist', 'commands', 'find') + + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + + fs.writeFileSync(path.join(srcDir, 'opensource.src.mdx'), [ + 'export default {', + ' description: \'中文源描述\',', + '}', + '', + '中文源命令内容', + '' + ].join('\n'), 'utf8') + fs.writeFileSync(path.join(distDir, 'opensource.mdx'), [ + 'export default {', + ' description: \'English dist description\',', + '}', + '', + 'English dist command body', + '' + ].join('\n'), 'utf8') + + const commandInputCapability = new CommandInputCapability() + const collected = await commandInputCapability.collect(createInputContext(workspace)) + const commands = collected.commands ?? [] + + expect(commands).toHaveLength(1) + + const codexPlugin = new TestCodexCLIOutputPlugin(homeDir) + const writeCtx = createWriteContext(workspace, commands) + const declarations = await codexPlugin.declareOutputFiles(writeCtx) + const commandDeclaration = declarations.find( + declaration => declaration.path.replaceAll('\\', '/').endsWith('/.codex/prompts/find-opensource.md') + ) + + expect(commandDeclaration).toBeDefined() + if (commandDeclaration == null) throw new Error('Expected codex command declaration') + + const rendered = await codexPlugin.convertContent(commandDeclaration, writeCtx) + expect(String(rendered)).toContain('English dist description') + expect(String(rendered)).toContain('English dist command body') + expect(String(rendered)).not.toContain('中文源描述') + expect(String(rendered)).not.toContain('中文源命令内容') + }) + }) + + it('keeps project-scoped commands in the global codex directory and never mirrors them into workspace root', async () => { + await withTempCodexDirs('tnmsc-codex-project-command', async ({workspace, homeDir}) => { + const plugin = new TestCodexCLIOutputPlugin(homeDir) + const writeCtx = createWriteContext(workspace, [createProjectCommandPrompt()]) + + const declarations = await plugin.declareOutputFiles(writeCtx) + + expect(declarations.map(declaration => declaration.path)).toContain( + path.join(homeDir, '.codex', 'prompts', 'dev-build.md') + ) + expect(declarations.map(declaration => declaration.path)).not.toContain( + path.join(workspace, '.codex', 'prompts', 'dev-build.md') + ) + expect(declarations.every(declaration => declaration.scope === 'global')).toBe(true) + }) + }) + + it('drops tool allowlist fields from codex command front matter', async () => { + await withTempCodexDirs('tnmsc-codex-command-tools', async ({workspace, homeDir}) => { + const plugin = new TestCodexCLIOutputPlugin(homeDir) + const writeCtx = createWriteContext(workspace, [createCommandPromptWithToolFields()]) + const declarations = await plugin.declareOutputFiles(writeCtx) + const declaration = declarations.find(item => item.path === path.join(homeDir, '.codex', 'prompts', 'dev-build.md')) + + expect(declaration).toBeDefined() + if (declaration == null) throw new Error('Expected codex command declaration') + + const rendered = await plugin.convertContent(declaration, writeCtx) + expect(String(rendered)).toContain('description: Tool-aware command') + expect(String(rendered)).not.toContain('allowTools') + expect(String(rendered)).not.toContain('allowedTools') + }) + }) + + it('writes project-scoped subagents into each project .codex/agents directory as toml', async () => { + await withTempCodexDirs('tnmsc-codex-project-subagent', async ({workspace, homeDir}) => { + const plugin = new TestCodexCLIOutputPlugin(homeDir) + const writeCtx = createWriteContext(workspace, [], [createSubAgentPrompt('project')]) + + const declarations = await plugin.declareOutputFiles(writeCtx) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(workspace, 'project-a', '.codex', 'agents', 'qa-reviewer.toml')) + expect(paths).toContain(path.join(workspace, 'project-b', '.codex', 'agents', 'qa-reviewer.toml')) + expect(paths).not.toContain(path.join(homeDir, '.codex', 'agents', 'qa-reviewer.toml')) + + const declaration = declarations.find(item => item.path === path.join(workspace, 'project-a', '.codex', 'agents', 'qa-reviewer.toml')) + expect(declaration).toBeDefined() + if (declaration == null) throw new Error('Expected codex subagent declaration') + + const rendered = await plugin.convertContent(declaration, writeCtx) + expect(String(rendered)).toContain('name = "qa-reviewer"') + expect(String(rendered)).toContain('description = "Review pull requests"') + expect(String(rendered)).toContain([ + 'developer_instructions = """', + 'Review changes carefully.', + 'Focus on concrete regressions."""' + ].join('\n')) + expect(String(rendered)).toContain('nickname_candidates = ["guard"]') + expect(String(rendered)).toContain('sandbox_mode = "workspace-write"') + expect(String(rendered)).toContain('[mcp_servers]') + expect(String(rendered)).toContain('[mcp_servers.docs]') + expect(String(rendered)).not.toContain('model = ') + expect(String(rendered)).not.toContain('scope = ') + expect(String(rendered)).not.toContain('allowTools') + expect(String(rendered)).not.toContain('allowedTools') + expect(String(rendered)).not.toContain('color = ') + }) + }) + + it('remaps global-scoped subagents to project outputs instead of writing to the global codex directory', async () => { + await withTempCodexDirs('tnmsc-codex-global-subagent', async ({workspace, homeDir}) => { + const plugin = new TestCodexCLIOutputPlugin(homeDir) + const writeCtx = createWriteContext(workspace, [], [createSubAgentPrompt('global')]) + + const declarations = await plugin.declareOutputFiles(writeCtx) + + expect(declarations.map(declaration => declaration.path)).toContain( + path.join(workspace, 'project-a', '.codex', 'agents', 'qa-reviewer.toml') + ) + expect(declarations.map(declaration => declaration.path)).not.toContain( + path.join(homeDir, '.codex', 'agents', 'qa-reviewer.toml') + ) + expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) + }) + }) + + it('cleans global codex skills while preserving the built-in .system directory', async () => { + await withTempCodexDirs('tnmsc-codex-cleanup-skills', async ({homeDir}) => { + const plugin = new TestCodexCLIOutputPlugin(homeDir) + const skillsDir = path.join(homeDir, '.codex', 'skills') + const preservedDir = path.join(skillsDir, '.system') + const staleDir = path.join(skillsDir, 'legacy-skill') + + fs.mkdirSync(preservedDir, {recursive: true}) + fs.mkdirSync(staleDir, {recursive: true}) + fs.writeFileSync(path.join(preservedDir, 'SKILL.md'), '# preserved', 'utf8') + fs.writeFileSync(path.join(staleDir, 'SKILL.md'), '# stale', 'utf8') + + const cleanupDeclarations = await plugin.declareCleanupPaths(createCleanContext()) + const protectPaths = cleanupDeclarations.protect?.map(target => target.path.replaceAll('\\', '/')) ?? [] + const skillCleanupTarget = cleanupDeclarations.delete?.find(target => target.kind === 'glob' && target.path.includes(`${path.sep}.codex${path.sep}skills${path.sep}`)) + const cleanupPlan = await collectDeletionTargets([plugin], createCleanContext()) + const normalizedDeleteDirs = cleanupPlan.dirsToDelete.map(target => target.replaceAll('\\', '/')) + const normalizedPreservedDir = preservedDir.replaceAll('\\', '/') + const normalizedStaleDir = staleDir.replaceAll('\\', '/') + + expect(skillCleanupTarget).toBeDefined() + expect(skillCleanupTarget?.excludeBasenames).toEqual(['.system']) + expect(protectPaths).toContain(normalizedPreservedDir) + expect(normalizedDeleteDirs).toContain(normalizedStaleDir) + expect(normalizedDeleteDirs).not.toContain(normalizedPreservedDir) + expect(cleanupPlan.violations).toEqual([]) + }) + }) +}) diff --git a/sdk/src/plugins/CodexCLIOutputPlugin.ts b/sdk/src/plugins/CodexCLIOutputPlugin.ts new file mode 100644 index 00000000..e11d54f9 --- /dev/null +++ b/sdk/src/plugins/CodexCLIOutputPlugin.ts @@ -0,0 +1,124 @@ +import type {AbstractOutputPluginOptions, OutputCleanContext, OutputCleanupDeclarations} from './plugin-core' +import {AbstractOutputPlugin, PLUGIN_NAMES, resolveSubAgentCanonicalName} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'AGENTS.md' +const GLOBAL_CONFIG_DIR = '.codex' +const PROMPTS_SUBDIR = 'prompts' +const AGENTS_SUBDIR = 'agents' +const SKILLS_SUBDIR = 'skills' +const PRESERVED_SYSTEM_SKILL_DIR = '.system' +const CODEX_SUBAGENT_FIELD_ORDER = ['name', 'description', 'developer_instructions'] as const +const CODEX_EXCLUDED_SUBAGENT_FIELDS = ['scope', 'seriName', 'argumentHint', 'color', 'namingCase', 'model'] as const + +function sanitizeCodexFrontMatter( + sourceFrontMatter?: Record +): Record { + const frontMatter = {...sourceFrontMatter} + + // Codex front matter rejects tool allowlists. Keep accepting upstream metadata + // for other outputs, but drop both common spellings here for Codex compatibility. + delete frontMatter['allowTools'] + delete frontMatter['allowedTools'] + return frontMatter +} + +function transformCodexSubAgentFrontMatter( + subAgentCanonicalName: string, + sourceFrontMatter?: Record +): Record { + const frontMatter = sanitizeCodexFrontMatter(sourceFrontMatter) + frontMatter['name'] = subAgentCanonicalName + return frontMatter +} + +const CODEX_OUTPUT_OPTIONS = { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: PROJECT_MEMORY_FILE, + commands: { + subDir: PROMPTS_SUBDIR, + scopeRemap: { + project: 'global' + }, + transformFrontMatter: (_cmd, context) => sanitizeCodexFrontMatter(context.sourceFrontMatter) + }, + subagents: { + subDir: AGENTS_SUBDIR, + sourceScopes: ['project'], + scopeRemap: { + global: 'project' + }, + ext: '.toml', + artifactFormat: 'toml', + bodyFieldName: 'developer_instructions', + excludedFrontMatterFields: CODEX_EXCLUDED_SUBAGENT_FIELDS, + transformFrontMatter: (subAgent, context) => transformCodexSubAgentFrontMatter(resolveSubAgentCanonicalName(subAgent), context.sourceFrontMatter), + fieldOrder: CODEX_SUBAGENT_FIELD_ORDER + }, + cleanup: { + delete: { + project: { + dirs: ['.codex/agents'] + }, + global: { + files: ['.codex/AGENTS.md'], + dirs: ['.codex/prompts'], + globs: ['.codex/skills/*'] + } + }, + protect: { + global: { + dirs: [`.codex/${SKILLS_SUBDIR}/${PRESERVED_SYSTEM_SKILL_DIR}`] + } + } + }, + wslMirrors: [ + '~/.codex/config.toml', + '~/.codex/auth.json' + ], + dependsOn: [PLUGIN_NAMES.AgentsOutput], + capabilities: { + prompt: { + scopes: ['global'], + singleScope: false + }, + commands: { + scopes: ['global'], + singleScope: true + }, + subagents: { + scopes: ['project'], + singleScope: true + } + } +} satisfies AbstractOutputPluginOptions + +export class CodexCLIOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('CodexCLIOutputPlugin', CODEX_OUTPUT_OPTIONS) + } + + /** + * Project-scoped output still writes to the workspace project, but Codex also + * resolves user-installed skills from `~/.codex/skills/`. Cleanup therefore + * needs to prune that global skills directory as well, while preserving the + * built-in `.system/` subtree. + */ + override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + const declarations = await super.declareCleanupPaths(ctx) + + return { + ...declarations, + delete: (declarations.delete ?? []).map(target => { + if (target.kind !== 'glob') return target + + const normalizedPath = target.path.replaceAll('\\', '/') + if (!normalizedPath.endsWith(`/.codex/${SKILLS_SUBDIR}/*`)) return target + + return { + ...target, + excludeBasenames: [PRESERVED_SYSTEM_SKILL_DIR] + } + }) + } + } +} diff --git a/sdk/src/plugins/CursorOutputPlugin.test.ts b/sdk/src/plugins/CursorOutputPlugin.test.ts new file mode 100644 index 00000000..17fab4a4 --- /dev/null +++ b/sdk/src/plugins/CursorOutputPlugin.test.ts @@ -0,0 +1,351 @@ +import type {CommandPrompt, GlobalMemoryPrompt, OutputCleanContext, OutputWriteContext, RulePrompt, SkillPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {collectDeletionTargets} from '@/commands/CleanupUtils' +import {CursorOutputPlugin} from './CursorOutputPlugin' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' + +class TestCursorOutputPlugin extends CursorOutputPlugin { + constructor(private readonly testHomeDir: string) { + super() + } + + protected override getHomeDir(): string { + return this.testHomeDir + } +} + +function createCleanContext(): OutputCleanContext { + return { + logger: { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + }, + fs, + path, + glob, + dryRun: true, + runtimeTargets: { + jetbrainsCodexDirs: [] + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Relative, + path: '.', + basePath: '.', + getDirectoryName: () => '.', + getAbsolutePath: () => path.resolve('.') + }, + projects: [] + } + } + } as OutputCleanContext +} + +function createCommandPrompt(): CommandPrompt { + return { + type: PromptKind.Command, + content: 'command body', + length: 12, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'commands/dev/build.mdx', + basePath: path.resolve('tmp/dist/commands'), + getDirectoryName: () => 'dev', + getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') + }, + commandPrefix: 'dev', + commandName: 'build', + yamlFrontMatter: { + description: 'Build', + scope: 'project' + }, + markdownContents: [] + } as CommandPrompt +} + +function createGlobalMemoryPrompt(): GlobalMemoryPrompt { + return { + type: PromptKind.GlobalMemory, + content: 'global prompt', + length: 13, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'aindex/dist/global.mdx', + basePath: path.resolve('.'), + getDirectoryName: () => 'dist', + getAbsolutePath: () => path.resolve('aindex/dist/global.mdx') + }, + markdownContents: [] + } as GlobalMemoryPrompt +} + +function createSkillPrompt( + scope: 'project' | 'global' = 'project', + name: string = 'ship-it' +): SkillPrompt { + return { + type: PromptKind.Skill, + content: 'skill body', + length: 10, + filePathKind: FilePathKind.Relative, + skillName: name, + dir: { + pathKind: FilePathKind.Relative, + path: `skills/${name}`, + basePath: path.resolve('tmp/dist/skills'), + getDirectoryName: () => name, + getAbsolutePath: () => path.resolve('tmp/dist/skills', name) + }, + yamlFrontMatter: { + description: 'Ship release', + scope + }, + mcpConfig: { + type: PromptKind.SkillMcpConfig, + mcpServers: { + inspector: { + command: 'npx', + args: ['inspector'] + } + }, + rawContent: '{"mcpServers":{"inspector":{"command":"npx","args":["inspector"]}}}' + }, + markdownContents: [] + } as SkillPrompt +} + +function createRulePrompt(): RulePrompt { + return { + type: PromptKind.Rule, + content: 'rule body', + length: 9, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'rules/ops/guard.mdx', + basePath: path.resolve('tmp/dist/rules'), + getDirectoryName: () => 'ops', + getAbsolutePath: () => path.resolve('tmp/dist/rules/ops/guard.mdx') + }, + prefix: 'ops', + ruleName: 'guard', + globs: ['src/**'], + scope: 'project', + markdownContents: [] + } as RulePrompt +} + +describe('cursorOutputPlugin cleanup', () => { + it('declares cleanup exclusions for built-in skills and lets core cleanup skip them', async () => { + const tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cursor-cleanup-')) + const skillsDir = path.join(tempHomeDir, '.cursor', 'skills-cursor') + const preservedDir = path.join(skillsDir, 'create-rule') + const staleDir = path.join(skillsDir, 'legacy-skill') + + fs.mkdirSync(preservedDir, {recursive: true}) + fs.mkdirSync(staleDir, {recursive: true}) + fs.writeFileSync(path.join(preservedDir, 'SKILL.md'), '# preserved', 'utf8') + fs.writeFileSync(path.join(staleDir, 'SKILL.md'), '# stale', 'utf8') + + try { + const plugin = new TestCursorOutputPlugin(tempHomeDir) + const result = await plugin.declareCleanupPaths(createCleanContext()) + const protectPaths = result.protect?.map(target => target.path.replaceAll('\\', '/')) ?? [] + const normalizedCommandsDir = path.join(tempHomeDir, '.cursor', 'commands').replaceAll('\\', '/') + const normalizedStaleDir = staleDir.replaceAll('\\', '/') + const normalizedPreservedDir = preservedDir.replaceAll('\\', '/') + const skillCleanupTarget = result.delete?.find(target => target.kind === 'glob' && target.path.includes('skills')) + const cleanupPlan = await collectDeletionTargets([plugin], createCleanContext()) + const normalizedDeleteDirs = cleanupPlan.dirsToDelete.map(target => target.replaceAll('\\', '/')) + + expect(result.delete?.map(target => target.path.replaceAll('\\', '/')) ?? []).toContain(normalizedCommandsDir) + expect(skillCleanupTarget?.excludeBasenames).toEqual(expect.arrayContaining(['create-rule'])) + expect(normalizedDeleteDirs).toContain(normalizedStaleDir) + expect(normalizedDeleteDirs).not.toContain(normalizedPreservedDir) + expect(protectPaths).toContain(normalizedPreservedDir) + } + finally { + fs.rmSync(tempHomeDir, {recursive: true, force: true}) + } + }) + + it('writes project-scoped commands, skills, mcp, and rules into workspace root through the synthetic workspace project', async () => { + const workspaceBase = path.resolve('tmp/cursor-workspace') + const plugin = new TestCursorOutputPlugin(path.join(workspaceBase, 'home')) + const ctx = { + logger: createLogger('CursorOutputPlugin', 'error'), + fs, + path, + glob, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [{ + name: '__workspace__', + isWorkspaceRootProject: true + }] + }, + commands: [createCommandPrompt()], + skills: [createSkillPrompt()], + rules: [createRulePrompt()] + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(workspaceBase, '.cursor', 'commands', 'dev-build.md')) + expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills', 'ship-it', 'SKILL.md')) + expect(paths).toContain(path.join(workspaceBase, '.cursor', 'mcp.json')) + expect(paths).toContain(path.join(workspaceBase, '.cursor', 'rules', 'rule-ops-guard.md')) + expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) + }) + + it('keeps skill files global when only mcp is project-scoped', async () => { + const workspaceBase = path.resolve('tmp/cursor-split-scope-project-mcp') + const homeDir = path.join(workspaceBase, 'home') + const plugin = new TestCursorOutputPlugin(homeDir) + const ctx = { + logger: createLogger('CursorOutputPlugin', 'error'), + fs, + path, + glob, + dryRun: true, + pluginOptions: { + outputScopes: { + plugins: { + CursorOutputPlugin: { + skills: 'global', + mcp: 'project' + } + } + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [{ + name: '__workspace__', + isWorkspaceRootProject: true + }] + }, + skills: [ + createSkillPrompt('project', 'inspect-locally'), + createSkillPrompt('global', 'ship-it') + ] + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'ship-it', 'SKILL.md')) + expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills', 'inspect-locally', 'mcp.json')) + expect(paths).toContain(path.join(workspaceBase, '.cursor', 'mcp.json')) + expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'skills', 'ship-it', 'SKILL.md')) + expect(paths).not.toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'inspect-locally', 'SKILL.md')) + expect(paths).not.toContain(path.join(homeDir, '.cursor', 'mcp.json')) + }) + + it('keeps skill files project-scoped when only mcp is global-scoped', async () => { + const workspaceBase = path.resolve('tmp/cursor-split-scope-global-mcp') + const homeDir = path.join(workspaceBase, 'home') + const plugin = new TestCursorOutputPlugin(homeDir) + const ctx = { + logger: createLogger('CursorOutputPlugin', 'error'), + fs, + path, + glob, + dryRun: true, + pluginOptions: { + outputScopes: { + plugins: { + CursorOutputPlugin: { + skills: 'project', + mcp: 'global' + } + } + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [{ + name: '__workspace__', + isWorkspaceRootProject: true + }] + }, + skills: [ + createSkillPrompt('project', 'ship-it'), + createSkillPrompt('global', 'inspect-globally') + ] + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills', 'ship-it', 'SKILL.md')) + expect(paths).toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'inspect-globally', 'mcp.json')) + expect(paths).toContain(path.join(homeDir, '.cursor', 'mcp.json')) + expect(paths).not.toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'ship-it', 'SKILL.md')) + expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'skills', 'inspect-globally', 'SKILL.md')) + expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'mcp.json')) + }) + + it('writes the global prompt to workspace root through the synthetic workspace project', async () => { + const workspaceBase = path.resolve('tmp/cursor-workspace-global-prompt') + const plugin = new TestCursorOutputPlugin(path.join(workspaceBase, 'home')) + const ctx = { + logger: createLogger('CursorOutputPlugin', 'error'), + fs, + path, + glob, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [{ + name: '__workspace__', + isWorkspaceRootProject: true + }] + }, + globalMemory: createGlobalMemoryPrompt() + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + + expect(declarations.map(declaration => declaration.path)).toContain( + path.join(workspaceBase, '.cursor', 'rules', 'global.mdc') + ) + }) +}) diff --git a/sdk/src/plugins/CursorOutputPlugin.ts b/sdk/src/plugins/CursorOutputPlugin.ts new file mode 100644 index 00000000..4fc08530 --- /dev/null +++ b/sdk/src/plugins/CursorOutputPlugin.ts @@ -0,0 +1,561 @@ +import type { + CommandPrompt, + OutputCleanContext, + OutputCleanupDeclarations, + OutputFileDeclaration, + OutputWriteContext, + RulePrompt, + SkillPrompt +} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import { + AbstractOutputPlugin, + applySubSeriesGlobPrefix, + collectMcpServersFromSkills, + filterByProjectConfig, + GlobalConfigDirs, + IgnoreFiles, + OutputFileNames, + OutputSubdirectories, + PLUGIN_NAMES, + PreservedSkills, + transformMcpConfigForCursor, + transformMcpServerMap +} from './plugin-core' + +const GLOBAL_CONFIG_DIR = GlobalConfigDirs.CURSOR +const MCP_CONFIG_FILE = OutputFileNames.MCP_CONFIG +const COMMANDS_SUBDIR = OutputSubdirectories.COMMANDS +const RULES_SUBDIR = OutputSubdirectories.RULES +const GLOBAL_RULE_FILE = OutputFileNames.CURSOR_GLOBAL_RULE +const SKILLS_CURSOR_SUBDIR = OutputSubdirectories.CURSOR_SKILLS +const SKILLS_PROJECT_SUBDIR = 'skills' +const SKILL_FILE_NAME = OutputFileNames.SKILL +const PRESERVED_SKILLS = PreservedSkills.CURSOR + +type CursorOutputSource + = | {readonly kind: 'command', readonly command: CommandPrompt} + | { + readonly kind: 'mcpConfig' + readonly mcpServers: Record> + } + | {readonly kind: 'skill', readonly skill: SkillPrompt} + | {readonly kind: 'skillMcpConfig', readonly rawContent: string} + | {readonly kind: 'skillChildDoc', readonly content: string} + | { + readonly kind: 'skillResource' + readonly content: string + readonly encoding: 'text' | 'base64' + } + | {readonly kind: 'globalRuleContent', readonly content: string} + | {readonly kind: 'ruleMdc', readonly rule: RulePrompt} + | {readonly kind: 'ignoreFile', readonly content: string} + +export class CursorOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('CursorOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: '', + treatWorkspaceRootProjectAsProject: true, + dependsOn: [PLUGIN_NAMES.AgentsOutput], + indexignore: IgnoreFiles.CURSOR, + commands: { + subDir: COMMANDS_SUBDIR, + transformFrontMatter: (_cmd, context) => + context.sourceFrontMatter ?? {} + }, + skills: { + subDir: SKILLS_CURSOR_SUBDIR + }, + rules: { + subDir: RULES_SUBDIR, + prefix: 'rule', + sourceScopes: ['project', 'global'] + }, + cleanup: { + delete: { + project: { + files: ['.cursor/mcp.json'], + dirs: ['.cursor/commands', '.cursor/rules'], + globs: ['.cursor/skills/*', '.cursor/skills-cursor/*'] + }, + global: { + files: ['.cursor/mcp.json'], + dirs: ['.cursor/commands', '.cursor/rules'], + globs: ['.cursor/skills-cursor/*'] + } + }, + protect: { + global: { + dirs: Array.from( + PRESERVED_SKILLS, + skillName => `.cursor/skills-cursor/${skillName}` + ) + } + }, + excludeScanGlobs: Array.from( + PRESERVED_SKILLS, + skillName => `.cursor/skills-cursor/${skillName}/**` + ) + }, + capabilities: { + prompt: { + scopes: ['global'], + singleScope: false + }, + rules: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'global'], + singleScope: true + }, + mcp: { + scopes: ['project', 'global'], + singleScope: true + } + } + }) + } + + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { + const declarations = await super.declareCleanupPaths(ctx) + return { + ...declarations, + delete: (declarations.delete ?? []).map(target => { + if (target.kind !== 'glob') return target + + const normalizedPath = target.path.replaceAll('\\', '/') + if (!normalizedPath.endsWith(`/.cursor/${SKILLS_CURSOR_SUBDIR}/*`)) + { return target } + + return { + ...target, + excludeBasenames: [...PRESERVED_SKILLS] + } + }) + } + } + + override async declareOutputFiles( + ctx: OutputWriteContext + ): Promise { + const declarations: OutputFileDeclaration[] = [] + const {globalMemory, commands, skills, rules, aiAgentIgnoreConfigFiles} + = ctx.collectedOutputContext + const globalDir = this.getGlobalConfigDir() + const promptSourceProjectConfig + = this.resolvePromptSourceProjectConfig(ctx) + const concreteProjects = this.getConcreteProjects(ctx) + const promptProjects = this.getProjectPromptOutputProjects(ctx) + const transformOptions = this.getTransformOptionsFromContext(ctx, { + includeSeriesPrefix: true + }) + const activePromptScopes = new Set( + this.selectPromptScopes(ctx, ['global']) + ) + const activeRuleScopes = new Set( + rules != null ? this.selectRuleScopes(ctx, rules) : [] + ) + const selectedSkills + = skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + const selectedMcpSkills + = skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'mcp') + ?? this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + const selectedCommands + = commands != null + ? this.selectSingleScopeItems( + commands, + this.commandsConfig.sourceScopes, + command => this.resolveCommandSourceScope(command), + this.getTopicScopeOverride(ctx, 'commands') + ) + : {items: [] as readonly CommandPrompt[]} + + const pushSkillDeclarations = ( + baseDir: string, + scope: 'project' | 'global', + filteredSkills: readonly SkillPrompt[] + ): void => { + const skillsSubDir + = scope === 'global' ? SKILLS_CURSOR_SUBDIR : SKILLS_PROJECT_SUBDIR + for (const skill of filteredSkills) { + const skillName = this.getSkillName(skill) + if (this.isPreservedSkill(skillName)) continue + + const skillDir = path.join(baseDir, skillsSubDir, skillName) + declarations.push({ + path: path.join(skillDir, SKILL_FILE_NAME), + scope, + source: {kind: 'skill', skill} satisfies CursorOutputSource + }) + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join( + skillDir, + childDoc.relativePath.replace(/\.mdx$/, '.md') + ), + scope, + source: { + kind: 'skillChildDoc', + content: childDoc.content as string + } satisfies CursorOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope, + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies CursorOutputSource + }) + } + } + } + } + + const pushSkillMcpDeclarations = ( + baseDir: string, + scope: 'project' | 'global', + filteredMcpSkills: readonly SkillPrompt[] + ): void => { + const skillsSubDir + = scope === 'global' ? SKILLS_CURSOR_SUBDIR : SKILLS_PROJECT_SUBDIR + for (const skill of filteredMcpSkills) { + if (skill.mcpConfig == null) continue + + const skillDir = path.join( + baseDir, + skillsSubDir, + this.getSkillName(skill) + ) + declarations.push({ + path: path.join(skillDir, MCP_CONFIG_FILE), + scope, + source: { + kind: 'skillMcpConfig', + rawContent: skill.mcpConfig.rawContent + } satisfies CursorOutputSource + }) + } + } + + const pushMcpDeclaration = ( + baseDir: string, + scope: 'project' | 'global', + filteredSkills: readonly SkillPrompt[] + ): void => { + if (filteredSkills.length === 0) return + + const servers = collectMcpServersFromSkills(filteredSkills, this.log) + if (servers.size === 0) return + + declarations.push({ + path: path.join(baseDir, MCP_CONFIG_FILE), + scope, + source: { + kind: 'mcpConfig', + mcpServers: transformMcpServerMap( + servers, + transformMcpConfigForCursor + ) + } satisfies CursorOutputSource + }) + } + + if ( + selectedSkills.selectedScope === 'project' + || selectedMcpSkills.selectedScope === 'project' + ) { + for (const project of this.getProjectOutputProjects(ctx)) { + const baseDir = this.resolveProjectConfigDir(ctx, project) + if (baseDir == null) continue + + if (selectedSkills.selectedScope === 'project') { + const filteredSkills = filterByProjectConfig( + selectedSkills.items, + project.projectConfig, + 'skills' + ) + pushSkillDeclarations(baseDir, 'project', filteredSkills) + } + + if (selectedMcpSkills.selectedScope === 'project') { + const filteredMcpSkills = filterByProjectConfig( + selectedMcpSkills.items, + project.projectConfig, + 'skills' + ) + pushSkillMcpDeclarations(baseDir, 'project', filteredMcpSkills) + pushMcpDeclaration(baseDir, 'project', filteredMcpSkills) + } + } + } + + if ( + selectedSkills.selectedScope === 'global' + || selectedMcpSkills.selectedScope === 'global' + ) { + if (selectedSkills.selectedScope === 'global') { + const filteredSkills = filterByProjectConfig( + selectedSkills.items, + promptSourceProjectConfig, + 'skills' + ) + pushSkillDeclarations(globalDir, 'global', filteredSkills) + } + + if (selectedMcpSkills.selectedScope === 'global') { + const filteredMcpSkills = filterByProjectConfig( + selectedMcpSkills.items, + promptSourceProjectConfig, + 'skills' + ) + pushSkillMcpDeclarations(globalDir, 'global', filteredMcpSkills) + pushMcpDeclaration(globalDir, 'global', filteredMcpSkills) + } + } + + if (selectedCommands.selectedScope === 'project') { + for (const project of this.getProjectOutputProjects(ctx)) { + const baseDir = this.resolveProjectConfigDir(ctx, project) + if (baseDir == null) continue + + const filteredCommands = filterByProjectConfig( + selectedCommands.items, + project.projectConfig, + 'commands' + ) + for (const command of filteredCommands) { + declarations.push({ + path: path.join( + baseDir, + COMMANDS_SUBDIR, + this.transformCommandName(command, transformOptions) + ), + scope: 'project', + source: {kind: 'command', command} satisfies CursorOutputSource + }) + } + } + } + + if (selectedCommands.selectedScope === 'global') { + const filteredCommands = filterByProjectConfig( + selectedCommands.items, + promptSourceProjectConfig, + 'commands' + ) + for (const command of filteredCommands) { + declarations.push({ + path: path.join( + globalDir, + COMMANDS_SUBDIR, + this.transformCommandName(command, transformOptions) + ), + scope: 'global', + source: {kind: 'command', command} satisfies CursorOutputSource + }) + } + } + + if (rules != null && rules.length > 0) { + const globalRules = rules.filter( + rule => + this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global' + ) + if (activeRuleScopes.has('global')) { + for (const rule of globalRules) { + declarations.push({ + path: path.join( + globalDir, + RULES_SUBDIR, + this.buildRuleFileName(rule) + ), + scope: 'global', + source: {kind: 'ruleMdc', rule} satisfies CursorOutputSource + }) + } + } + + if (activeRuleScopes.has('project')) { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectBaseDir = this.resolveProjectConfigDir(ctx, project) + if (projectBaseDir == null) continue + const projectRules = applySubSeriesGlobPrefix( + filterByProjectConfig( + rules.filter( + rule => + this.normalizeSourceScope(this.normalizeRuleScope(rule)) + === 'project' + ), + project.projectConfig, + 'rules' + ), + project.projectConfig + ) + for (const rule of projectRules) { + declarations.push({ + path: path.join( + projectBaseDir, + RULES_SUBDIR, + this.buildRuleFileName(rule) + ), + scope: 'project', + source: {kind: 'ruleMdc', rule} satisfies CursorOutputSource + }) + } + } + } + } + + if (globalMemory != null && activePromptScopes.has('global')) { + const globalRuleContent = this.buildGlobalRuleContent( + globalMemory.content as string, + ctx + ) + for (const project of promptProjects) { + const projectBaseDir = this.resolveProjectConfigDir(ctx, project) + if (projectBaseDir == null) continue + declarations.push({ + path: path.join(projectBaseDir, RULES_SUBDIR, GLOBAL_RULE_FILE), + scope: 'project', + source: { + kind: 'globalRuleContent', + content: globalRuleContent + } satisfies CursorOutputSource + }) + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile + = this.indexignore == null + ? void 0 + : aiAgentIgnoreConfigFiles?.find( + file => file.fileName === this.indexignore + ) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of concreteProjects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) + { continue } + declarations.push({ + path: path.join( + projectDir.basePath, + projectDir.path, + ignoreOutputPath + ), + scope: 'project', + source: { + kind: 'ignoreFile', + content: ignoreFile.content + } satisfies CursorOutputSource + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + const source = declaration.source as CursorOutputSource + switch (source.kind) { + case 'command': + return this.buildCommandContent(source.command, ctx) + case 'mcpConfig': + return JSON.stringify({mcpServers: source.mcpServers}, null, 2) + case 'skill': { + const frontMatterData = this.buildSkillFrontMatter(source.skill) + return this.buildMarkdownContent( + source.skill.content as string, + frontMatterData, + ctx + ) + } + case 'skillMcpConfig': + return source.rawContent + case 'skillChildDoc': + case 'globalRuleContent': + case 'ignoreFile': + return source.content + case 'skillResource': + return source.encoding === 'base64' + ? Buffer.from(source.content, 'base64') + : source.content + case 'ruleMdc': + return this.buildRuleMdcContent(source.rule, ctx) + default: + throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + private buildGlobalRuleContent( + content: string, + ctx: OutputWriteContext + ): string { + return this.buildMarkdownContent( + content, + {description: 'Global prompt (synced)', alwaysApply: true}, + ctx + ) + } + + private isPreservedSkill(name: string): boolean { + return PRESERVED_SKILLS.has(name) + } + + protected buildRuleMdcContent( + rule: RulePrompt, + ctx?: OutputWriteContext + ): string { + const fmData: Record = { + alwaysApply: false, + globs: rule.globs.length > 0 ? rule.globs.join(', ') : '' + } + const raw = this.buildMarkdownContent(rule.content, fmData, ctx) + const lines = raw.split('\n') + const transformedLines = lines.map(line => { + const match = /^(\s*globs:\s*)(['"])(.*)\2\s*$/.exec(line) + if (match == null) return line + const prefix = match[1] ?? 'globs: ' + const value = match[3] ?? '' + if (value.trim().length === 0) return line + return `${prefix}${value}` + }) + return transformedLines.join('\n') + } +} diff --git a/sdk/src/plugins/DroidCLIOutputPlugin.ts b/sdk/src/plugins/DroidCLIOutputPlugin.ts new file mode 100644 index 00000000..5845cfc4 --- /dev/null +++ b/sdk/src/plugins/DroidCLIOutputPlugin.ts @@ -0,0 +1,56 @@ +import type { + OutputWriteContext, + SkillPrompt +} from './plugin-core' +import {AbstractOutputPlugin} from './plugin-core' + +const GLOBAL_MEMORY_FILE = 'AGENTS.md' +const GLOBAL_CONFIG_DIR = '.factory' + +export class DroidCLIOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('DroidCLIOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: GLOBAL_MEMORY_FILE, + treatWorkspaceRootProjectAsProject: true, + commands: { + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + }, + skills: {}, + cleanup: { + delete: { + project: { + files: [GLOBAL_MEMORY_FILE], + dirs: ['.factory/commands', '.factory/skills'] + }, + global: { + files: ['.factory/AGENTS.md'], + dirs: ['.factory/commands', '.factory/skills'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'global'], + singleScope: true + } + } + }) // Droid uses default subdir names + } + + protected override buildSkillMainContent(skill: SkillPrompt, ctx?: OutputWriteContext): string { // Droid-specific: Simplify front matter + const simplifiedFrontMatter = skill.yamlFrontMatter != null // Droid-specific: Simplify front matter + ? {name: this.getSkillName(skill), description: skill.yamlFrontMatter.description} + : void 0 + + return this.buildMarkdownContent(skill.content as string, simplifiedFrontMatter, ctx) + } +} diff --git a/sdk/src/plugins/EditorConfigOutputPlugin.ts b/sdk/src/plugins/EditorConfigOutputPlugin.ts new file mode 100644 index 00000000..88038b60 --- /dev/null +++ b/sdk/src/plugins/EditorConfigOutputPlugin.ts @@ -0,0 +1,59 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin} from './plugin-core' + +const EDITOR_CONFIG_FILE = '.editorconfig' + +/** + * Output plugin for writing .editorconfig files to project directories. + * Reads EditorConfig files collected by EditorConfigInputCapability. + */ +export class EditorConfigOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('EditorConfigOutputPlugin', { + cleanup: { + delete: { + project: { + files: [EDITOR_CONFIG_FILE] + } + } + }, + capabilities: {} + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {projects} = ctx.collectedOutputContext.workspace + const {editorConfigFiles} = ctx.collectedOutputContext + + if (editorConfigFiles == null || editorConfigFiles.length === 0) return declarations + + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + for (const config of editorConfigFiles) { + declarations.push({ + path: this.resolvePath(projectDir.basePath, projectDir.path, EDITOR_CONFIG_FILE), + scope: 'project', + source: {content: config.content} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } +} diff --git a/sdk/src/plugins/GeminiCLIOutputPlugin.ts b/sdk/src/plugins/GeminiCLIOutputPlugin.ts new file mode 100644 index 00000000..9c4b9bb2 --- /dev/null +++ b/sdk/src/plugins/GeminiCLIOutputPlugin.ts @@ -0,0 +1,57 @@ +import type { + OutputCleanContext, + OutputCleanupDeclarations +} from './plugin-core' +import {AbstractOutputPlugin} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'GEMINI.md' +const GLOBAL_CONFIG_DIR = '.gemini' + +export class GeminiCLIOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('GeminiCLIOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: PROJECT_MEMORY_FILE, + treatWorkspaceRootProjectAsProject: true, + cleanup: { + delete: { + global: { + files: ['.gemini/GEMINI.md'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + } + } + }) + } + + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { + const declarations = await super.declareCleanupPaths(ctx) + const promptSourceProjects + = ctx.collectedOutputContext.workspace.projects.filter( + project => project.isPromptSourceProject === true + ) + const promptSourceExcludeGlobs = promptSourceProjects + .map(project => project.dirFromWorkspacePath) + .filter((dir): dir is NonNullable => dir != null) + .map(dir => this.resolvePath(dir.basePath, dir.path, '**')) + + return { + ...declarations, + delete: [ + ...declarations.delete ?? [], + ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) + ], + excludeScanGlobs: [ + ...declarations.excludeScanGlobs ?? [], + ...promptSourceExcludeGlobs + ] + } + } +} diff --git a/sdk/src/plugins/GenericSkillsOutputPlugin.test.ts b/sdk/src/plugins/GenericSkillsOutputPlugin.test.ts new file mode 100644 index 00000000..092e54f5 --- /dev/null +++ b/sdk/src/plugins/GenericSkillsOutputPlugin.test.ts @@ -0,0 +1,192 @@ +import type {OutputCleanContext, OutputWriteContext, SkillPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {GenericSkillsOutputPlugin} from './GenericSkillsOutputPlugin' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' + +class TestGenericSkillsOutputPlugin extends GenericSkillsOutputPlugin { + constructor(private readonly testHomeDir: string) { + super() + } + + protected override getHomeDir(): string { + return this.testHomeDir + } +} + +function createSkillPrompt(scope: 'project' | 'global', name: string): SkillPrompt { + return { + type: PromptKind.Skill, + content: 'skill body', + length: 10, + filePathKind: FilePathKind.Relative, + skillName: name, + dir: { + pathKind: FilePathKind.Relative, + path: `skills/${name}`, + basePath: path.resolve('tmp/dist/skills'), + getDirectoryName: () => name, + getAbsolutePath: () => path.resolve('tmp/dist/skills', name) + }, + yamlFrontMatter: { + description: 'Skill description', + scope + }, + mcpConfig: { + type: PromptKind.SkillMcpConfig, + mcpServers: { + inspector: { + command: 'npx', + args: ['inspector'] + } + }, + rawContent: '{"mcpServers":{"inspector":{"command":"npx","args":["inspector"]}}}' + }, + markdownContents: [] + } as SkillPrompt +} + +function createContext( + workspaceBase: string, + pluginOptions?: OutputWriteContext['pluginOptions'], + skills: readonly SkillPrompt[] = [createSkillPrompt('project', 'ship-it')] +): OutputWriteContext { + return { + logger: createLogger('GenericSkillsOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + pluginOptions, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [{ + name: '__workspace__', + isWorkspaceRootProject: true + }] + }, + skills: [...skills] + } + } as OutputWriteContext +} + +function createCleanContext(): OutputCleanContext { + return { + logger: createLogger('GenericSkillsOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + runtimeTargets: { + jetbrainsCodexDirs: [] + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Relative, + path: '.', + basePath: '.', + getDirectoryName: () => '.', + getAbsolutePath: () => path.resolve('.') + }, + projects: [] + } + } + } as OutputCleanContext +} + +describe('genericSkillsOutputPlugin synthetic workspace project output', () => { + it('writes project-scoped skills into workspace root .agents/skills via the synthetic workspace project', async () => { + const workspaceBase = path.resolve('tmp/generic-skills-workspace') + const plugin = new TestGenericSkillsOutputPlugin(path.resolve('tmp/generic-skills-home')) + const ctx = createContext(workspaceBase) + + const declarations = await plugin.declareOutputFiles(ctx) + + expect(declarations.map(declaration => declaration.path)).toContain( + path.join(workspaceBase, '.agents', 'skills', 'ship-it', 'SKILL.md') + ) + expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) + }) + + it('writes global mcp.json even when skill files stay project-scoped', async () => { + const workspaceBase = path.resolve('tmp/generic-skills-workspace') + const homeDir = path.resolve('tmp/generic-skills-home') + const plugin = new TestGenericSkillsOutputPlugin(homeDir) + const skills = [ + createSkillPrompt('project', 'ship-it'), + createSkillPrompt('global', 'inspect-globally') + ] + const ctx = createContext(workspaceBase, { + outputScopes: { + plugins: { + GenericSkillsOutputPlugin: { + skills: 'project', + mcp: 'global' + } + } + } + }, skills) + + const declarations = await plugin.declareOutputFiles(ctx) + + expect(declarations.map(declaration => declaration.path)).toContain( + path.join(workspaceBase, '.agents', 'skills', 'ship-it', 'SKILL.md') + ) + expect(declarations.map(declaration => declaration.path)).toContain( + path.join(homeDir, '.agents', 'skills', 'inspect-globally', 'mcp.json') + ) + }) + + it('writes project mcp.json even when skill files stay global-scoped', async () => { + const workspaceBase = path.resolve('tmp/generic-skills-workspace') + const homeDir = path.resolve('tmp/generic-skills-home') + const plugin = new TestGenericSkillsOutputPlugin(homeDir) + const skills = [ + createSkillPrompt('project', 'inspect-locally'), + createSkillPrompt('global', 'ship-it') + ] + const ctx = createContext(workspaceBase, { + outputScopes: { + plugins: { + GenericSkillsOutputPlugin: { + skills: 'global', + mcp: 'project' + } + } + } + }, skills) + + const declarations = await plugin.declareOutputFiles(ctx) + + expect(declarations.map(declaration => declaration.path)).toContain( + path.join(homeDir, '.agents', 'skills', 'ship-it', 'SKILL.md') + ) + expect(declarations.map(declaration => declaration.path)).toContain( + path.join(workspaceBase, '.agents', 'skills', 'inspect-locally', 'mcp.json') + ) + }) +}) + +describe('genericSkillsOutputPlugin cleanup', () => { + it('declares cleanup for the full legacy global ~/.skills directory', async () => { + const homeDir = path.resolve('tmp/generic-skills-home') + const plugin = new TestGenericSkillsOutputPlugin(homeDir) + + const cleanup = await plugin.declareCleanupPaths(createCleanContext()) + const deletePaths = cleanup.delete?.map(target => target.path.replaceAll('\\', '/')) ?? [] + + expect(deletePaths).toContain( + path.join(homeDir, '.agents', 'skills').replaceAll('\\', '/') + ) + expect(deletePaths).toContain( + path.join(homeDir, '.skills').replaceAll('\\', '/') + ) + }) +}) diff --git a/sdk/src/plugins/GenericSkillsOutputPlugin.ts b/sdk/src/plugins/GenericSkillsOutputPlugin.ts new file mode 100644 index 00000000..c7698ab3 --- /dev/null +++ b/sdk/src/plugins/GenericSkillsOutputPlugin.ts @@ -0,0 +1,245 @@ +import type { + OutputFileDeclaration, + OutputWriteContext, + SkillPrompt +} from './plugin-core' + +import {Buffer} from 'node:buffer' +import {AbstractOutputPlugin, filterByProjectConfig} from './plugin-core' + +const PROJECT_SKILLS_DIR = '.agents/skills' +const LEGACY_SKILLS_DIR = '.skills' +const SKILL_FILE_NAME = 'SKILL.md' +const MCP_CONFIG_FILE = 'mcp.json' + +type GenericSkillOutputSource + = | {readonly kind: 'skillMain', readonly skill: SkillPrompt} + | {readonly kind: 'skillMcp', readonly rawContent: string} + | {readonly kind: 'skillChildDoc', readonly content: string} + | { + readonly kind: 'skillResource' + readonly content: string + readonly encoding: 'text' | 'base64' + } + +/** + * Output plugin that writes skills directly to each project's .agents/skills/ directory. + * + * Structure: + * - Project: /.agents/skills//SKILL.md, mcp.json, child docs, resources + * + * @deprecated Legacy compact skills output. Cleanup must remove the entire + * global `~/.skills/` directory in addition to the current skill targets. + */ +export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('GenericSkillsOutputPlugin', { + outputFileName: SKILL_FILE_NAME, + treatWorkspaceRootProjectAsProject: true, + skills: {}, + cleanup: { + delete: { + project: { + dirs: [PROJECT_SKILLS_DIR, LEGACY_SKILLS_DIR] + }, + global: { + dirs: [PROJECT_SKILLS_DIR, LEGACY_SKILLS_DIR] + } + } + }, + capabilities: { + skills: { + scopes: ['project', 'global'], + singleScope: true + }, + mcp: { + scopes: ['project', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles( + ctx: OutputWriteContext + ): Promise { + const declarations: OutputFileDeclaration[] = [] + const {skills} = ctx.collectedOutputContext + + if (skills == null || skills.length === 0) return declarations + + const selectedSkills = this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'skills') + ) + const selectedMcpSkills = this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'mcp') + ?? this.getTopicScopeOverride(ctx, 'skills') + ) + + const pushSkillDeclarations = ( + baseSkillsDir: string, + scope: 'project' | 'global', + filteredSkills: readonly SkillPrompt[] + ): void => { + for (const skill of filteredSkills) { + const skillName = this.getSkillName(skill) + const skillDir = this.joinPath(baseSkillsDir, skillName) + + declarations.push({ + path: this.joinPath(skillDir, SKILL_FILE_NAME), + scope, + source: { + kind: 'skillMain', + skill + } satisfies GenericSkillOutputSource + }) + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: this.joinPath( + skillDir, + childDoc.relativePath.replace(/\.mdx$/, '.md') + ), + scope, + source: { + kind: 'skillChildDoc', + content: childDoc.content as string + } satisfies GenericSkillOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: this.joinPath(skillDir, resource.relativePath), + scope, + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies GenericSkillOutputSource + }) + } + } + } + } + + const pushMcpDeclarations = ( + baseSkillsDir: string, + scope: 'project' | 'global', + filteredMcpSkills: readonly SkillPrompt[] + ): void => { + for (const skill of filteredMcpSkills) { + if (skill.mcpConfig == null) continue + + declarations.push({ + path: this.joinPath( + baseSkillsDir, + this.getSkillName(skill), + MCP_CONFIG_FILE + ), + scope, + source: { + kind: 'skillMcp', + rawContent: skill.mcpConfig.rawContent + } satisfies GenericSkillOutputSource + }) + } + } + + if ( + selectedSkills.selectedScope === 'project' + || selectedMcpSkills.selectedScope === 'project' + ) { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + + const filteredSkills = filterByProjectConfig( + selectedSkills.items, + project.projectConfig, + 'skills' + ) + const filteredMcpSkills = filterByProjectConfig( + selectedMcpSkills.items, + project.projectConfig, + 'skills' + ) + const baseSkillsDir = this.joinPath(projectRootDir, PROJECT_SKILLS_DIR) + + if ( + selectedSkills.selectedScope === 'project' + && filteredSkills.length > 0 + ) + { pushSkillDeclarations(baseSkillsDir, 'project', filteredSkills) } + + if (selectedMcpSkills.selectedScope === 'project') + { pushMcpDeclarations(baseSkillsDir, 'project', filteredMcpSkills) } + } + } + + if ( + selectedSkills.selectedScope !== 'global' + && selectedMcpSkills.selectedScope !== 'global' + ) + { return declarations } + + const baseSkillsDir = this.joinPath(this.getHomeDir(), PROJECT_SKILLS_DIR) + const promptSourceProjectConfig + = this.resolvePromptSourceProjectConfig(ctx) + if (selectedSkills.selectedScope === 'global') { + const filteredSkills = filterByProjectConfig( + selectedSkills.items, + promptSourceProjectConfig, + 'skills' + ) + if (filteredSkills.length > 0) + { pushSkillDeclarations(baseSkillsDir, 'global', filteredSkills) } + } + + if (selectedMcpSkills.selectedScope !== 'global') return declarations + + const filteredMcpSkills = filterByProjectConfig( + selectedMcpSkills.items, + promptSourceProjectConfig, + 'skills' + ) + pushMcpDeclarations(baseSkillsDir, 'global', filteredMcpSkills) + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + const source = declaration.source as GenericSkillOutputSource + switch (source.kind) { + case 'skillMain': { + const frontMatterData = this.buildSkillFrontMatter(source.skill) + return this.buildMarkdownContent( + source.skill.content as string, + frontMatterData, + ctx + ) + } + case 'skillMcp': + return source.rawContent + case 'skillChildDoc': + return source.content + case 'skillResource': + return source.encoding === 'base64' + ? Buffer.from(source.content, 'base64') + : source.content + default: + throw new Error(`Unsupported declaration source for ${this.name}`) + } + } +} diff --git a/sdk/src/plugins/GitExcludeOutputPlugin.ts b/sdk/src/plugins/GitExcludeOutputPlugin.ts new file mode 100644 index 00000000..8f20b92d --- /dev/null +++ b/sdk/src/plugins/GitExcludeOutputPlugin.ts @@ -0,0 +1,90 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import * as path from 'node:path' +import {AbstractOutputPlugin, findAllGitRepos, resolveGitInfoDir} from './plugin-core' + +export class GitExcludeOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('GitExcludeOutputPlugin', {capabilities: {}}) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {workspace, globalGitIgnore, shadowGitExclude} = ctx.collectedOutputContext + const managedContent = this.buildManagedContent(globalGitIgnore, shadowGitExclude) + if (managedContent.length === 0) return declarations + + const finalContent = this.normalizeContent(managedContent) + const writtenPaths = new Set() + const {projects} = workspace + + for (const project of projects) { + if (project.dirFromWorkspacePath == null) continue + + const projectDir = project.dirFromWorkspacePath.getAbsolutePath() + const gitRepoDirs = [projectDir, ...findAllGitRepos(projectDir)] + + for (const repoDir of gitRepoDirs) { + const gitInfoDir = resolveGitInfoDir(repoDir) + if (gitInfoDir == null) continue + + const excludePath = path.join(gitInfoDir, 'exclude') + if (writtenPaths.has(excludePath)) continue + writtenPaths.add(excludePath) + + declarations.push({ + path: excludePath, + scope: 'project', + source: {content: finalContent} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } + + private buildManagedContent(globalGitIgnore?: string, shadowGitExclude?: string): string { + const parts: string[] = [] + + if (globalGitIgnore != null && globalGitIgnore.trim().length > 0) { // Handle globalGitIgnore first + const sanitized = this.sanitizeContent(globalGitIgnore) + if (sanitized.length > 0) parts.push(sanitized) + } + + if (shadowGitExclude != null && shadowGitExclude.trim().length > 0) { // Handle shadowGitExclude + const sanitized = this.sanitizeContent(shadowGitExclude) + if (sanitized.length > 0) parts.push(sanitized) + } + + if (parts.length === 0) return '' // Return early if no content was added + return parts.join('\n') + } + + private sanitizeContent(content: string): string { + const lines = content.split(/\r?\n/) + const filtered = lines.filter(line => { + const trimmed = line.trim() + if (trimmed.length === 0) return true + return !(trimmed.startsWith('#') && !trimmed.startsWith('\\#')) + }) + return filtered.join('\n').trim() + } + + private normalizeContent(content: string): string { + const trimmed = content.trim() + if (trimmed.length === 0) return '' + return `${trimmed}\n` + } +} diff --git a/sdk/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts b/sdk/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts new file mode 100644 index 00000000..91c0b9c5 --- /dev/null +++ b/sdk/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts @@ -0,0 +1,366 @@ +import type { + CommandPrompt, + OutputCleanContext, + OutputCleanupDeclarations, + OutputFileDeclaration, + OutputPluginContext, + OutputWriteContext, + ProjectChildrenMemoryPrompt, + SkillPrompt +} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import {AbstractOutputPlugin, filterByProjectConfig, PLUGIN_NAMES} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'AGENTS.md' +const PROMPTS_SUBDIR = 'prompts' +const SKILLS_SUBDIR = 'skills' +const SKILL_FILE_NAME = 'SKILL.md' +const AIASSISTANT_DIR = '.aiassistant' +const CODEX_DIR = 'codex' +const RULES_SUBDIR = 'rules' +const ROOT_RULE_FILE = 'always.md' +const CHILD_RULE_FILE_PREFIX = 'glob-' +const RULE_APPLY_ALWAYS = '始终' +const RULE_APPLY_GLOB = '按文件模式' +const RULE_GLOB_KEY = '模式' +type JetBrainsCodexOutputSource + = | {readonly kind: 'projectRuleContent', readonly content: string} + | {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'skill', readonly skill: SkillPrompt} + | {readonly kind: 'skillReference', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'ignoreFile', readonly content: string} + +export class JetBrainsAIAssistantCodexOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('JetBrainsAIAssistantCodexOutputPlugin', { + outputFileName: PROJECT_MEMORY_FILE, + treatWorkspaceRootProjectAsProject: true, + commands: { + subDir: PROMPTS_SUBDIR, + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + }, + skills: { + subDir: SKILLS_SUBDIR + }, + dependsOn: [PLUGIN_NAMES.AgentsOutput], + indexignore: '.aiignore', + cleanup: { + delete: { + project: { + dirs: ['.aiassistant/rules', '.aiassistant/codex/prompts', '.aiassistant/codex/skills'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {globalMemory, commands, skills, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const concreteProjects = this.getConcreteProjects(ctx) + const promptProjects = this.getProjectPromptOutputProjects(ctx) + const codexDirs = this.getJetBrainsCodexDirs(ctx) + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) + const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) + const selectedCommands = commands != null + ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, command => this.resolveCommandSourceScope(command), this.getTopicScopeOverride(ctx, 'commands')) + : {items: [] as readonly CommandPrompt[]} + const selectedSkills = skills != null + ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) + : {items: [] as readonly SkillPrompt[]} + const transformOptions = this.getTransformOptionsFromContext(ctx) + + if (activePromptScopes.has('project')) { + for (const project of promptProjects) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + const rulesDir = path.join(projectRootDir, AIASSISTANT_DIR, RULES_SUBDIR) + + if (project.rootMemoryPrompt != null) { + declarations.push({ + path: path.join(rulesDir, ROOT_RULE_FILE), + scope: 'project', + source: { + kind: 'projectRuleContent', + content: this.buildAlwaysRuleContent(project.rootMemoryPrompt.content as string, ctx) + } satisfies JetBrainsCodexOutputSource + }) + } + + if (project.childMemoryPrompts != null) { + for (const child of project.childMemoryPrompts) { + declarations.push({ + path: path.join(rulesDir, this.buildChildRuleFileName(child)), + scope: 'project', + source: { + kind: 'projectRuleContent', + content: this.buildGlobRuleContent(child, ctx) + } satisfies JetBrainsCodexOutputSource + }) + } + } + } + } + + const pushSkillDeclarations = ( + basePath: string, + scope: 'project' | 'global', + filteredSkills: readonly SkillPrompt[] + ): void => { + for (const skill of filteredSkills) { + const skillName = this.getSkillName(skill) + const skillDir = path.join(basePath, SKILLS_SUBDIR, skillName) + declarations.push({ + path: path.join(skillDir, SKILL_FILE_NAME), + scope, + source: {kind: 'skill', skill} satisfies JetBrainsCodexOutputSource + }) + + if (skill.childDocs != null) { + for (const refDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, refDoc.dir.path.replace(/\.mdx$/, '.md')), + scope, + source: { + kind: 'skillReference', + content: refDoc.content as string + } satisfies JetBrainsCodexOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope, + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies JetBrainsCodexOutputSource + }) + } + } + } + } + + if (selectedCommands.selectedScope === 'project' || selectedSkills.selectedScope === 'project') { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + + const projectCodexDir = path.join(projectRootDir, AIASSISTANT_DIR, CODEX_DIR) + if (selectedCommands.selectedScope === 'project') { + const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') + for (const command of filteredCommands) { + declarations.push({ + path: path.join(projectCodexDir, PROMPTS_SUBDIR, this.transformCommandName(command, transformOptions)), + scope: 'project', + source: {kind: 'command', command} satisfies JetBrainsCodexOutputSource + }) + } + } + + if (selectedSkills.selectedScope === 'project') { + const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') + pushSkillDeclarations(projectCodexDir, 'project', filteredSkills) + } + } + } + + if (codexDirs.length > 0) { + if (globalMemory != null && activePromptScopes.has('global')) { + for (const codexDir of codexDirs) { + declarations.push({ + path: path.join(codexDir, PROJECT_MEMORY_FILE), + scope: 'global', + source: { + kind: 'globalMemory', + content: globalMemory.content as string + } satisfies JetBrainsCodexOutputSource + }) + } + } + + const filteredCommands = selectedCommands.selectedScope === 'global' + ? filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') + : [] + const filteredSkills = selectedSkills.selectedScope === 'global' + ? filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') + : [] + for (const codexDir of codexDirs) { + for (const command of filteredCommands) { + declarations.push({ + path: path.join(codexDir, PROMPTS_SUBDIR, this.transformCommandName(command, transformOptions)), + scope: 'global', + source: {kind: 'command', command} satisfies JetBrainsCodexOutputSource + }) + } + + pushSkillDeclarations(codexDir, 'global', filteredSkills) + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of concreteProjects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: { + kind: 'ignoreFile', + content: ignoreFile.content + } satisfies JetBrainsCodexOutputSource + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + const source = declaration.source as JetBrainsCodexOutputSource + switch (source.kind) { + case 'projectRuleContent': + case 'globalMemory': + case 'skillReference': + case 'ignoreFile': return source.content + case 'command': return this.buildCommandContent(source.command, ctx) + case 'skill': return this.buildCodexSkillContent(source.skill, ctx) + case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + const baseDeclarations = await super.declareCleanupPaths(ctx) + const codexDirs = this.getJetBrainsCodexDirs(ctx) + if (codexDirs.length === 0) return baseDeclarations + + const dynamicGlobalDeletes = codexDirs.flatMap(codexDir => ([ + {path: path.join(codexDir, PROJECT_MEMORY_FILE), kind: 'file', scope: 'global'}, + {path: path.join(codexDir, PROMPTS_SUBDIR), kind: 'directory', scope: 'global'}, + {path: path.join(codexDir, SKILLS_SUBDIR), kind: 'directory', scope: 'global'} + ] as const)) + const baseDeletes = baseDeclarations.delete ?? [] + + return { + ...baseDeclarations, + delete: [ + ...baseDeletes, + ...dynamicGlobalDeletes + ] + } + } + + private getJetBrainsCodexDirs(ctx: OutputPluginContext | OutputWriteContext | OutputCleanContext): readonly string[] { + return ctx.runtimeTargets.jetbrainsCodexDirs + } + + private buildChildRuleFileName(child: ProjectChildrenMemoryPrompt): string { + const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path + const normalizedPath = childPath + .replaceAll('\\', '/') + .replaceAll(/^\/+|\/+$/g, '') + .replaceAll('/', '-') + + const suffix = normalizedPath.length > 0 ? normalizedPath : 'root' + return `${CHILD_RULE_FILE_PREFIX}${suffix}.md` + } + + private buildChildRulePattern(child: ProjectChildrenMemoryPrompt): string { + const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path + const normalizedPath = childPath + .replaceAll('\\', '/') + .replaceAll(/^\/+|\/+$/g, '') + + if (normalizedPath.length === 0) return '**/*' + return `${normalizedPath}/**` + } + + private buildAlwaysRuleContent(content: string, ctx: OutputWriteContext): string { + const fmData: Record = { + apply: RULE_APPLY_ALWAYS + } + + return this.buildMarkdownContent(content, fmData, ctx) + } + + private buildGlobRuleContent(child: ProjectChildrenMemoryPrompt, ctx: OutputWriteContext): string { + const pattern = this.buildChildRulePattern(child) + const fmData: Record = { + apply: RULE_APPLY_GLOB, + [RULE_GLOB_KEY]: pattern + } + + return this.buildMarkdownContent(child.content as string, fmData, ctx) + } + + private buildCodexSkillContent(skill: SkillPrompt, ctx: OutputWriteContext): string { + const fm = skill.yamlFrontMatter + + const name = this.normalizeSkillName(this.getSkillName(skill), 64) + const description = this.normalizeToSingleLine(fm.description, 1024) + + const metadata: Record = {} + + if (fm.displayName != null) metadata['short-description'] = fm.displayName + if (fm.version != null) metadata['version'] = fm.version + if (fm.author != null) metadata['author'] = fm.author + if (fm.keywords != null && fm.keywords.length > 0) metadata['keywords'] = [...fm.keywords] + + const fmData: Record = { + name, + description + } + + if (Object.keys(metadata).length > 0) fmData['metadata'] = metadata + if (fm.allowTools != null && fm.allowTools.length > 0) fmData['allowed-tools'] = fm.allowTools.join(' ') + + return this.buildMarkdownContent(skill.content as string, fmData, ctx) + } + + private normalizeSkillName(name: string, maxLength: number): string { + let normalized = name + .toLowerCase() + .replaceAll(/[^a-z0-9-]/g, '-') + .replaceAll(/-+/g, '-') + .replaceAll(/^-+|-+$/g, '') + + if (normalized.length > maxLength) normalized = normalized.slice(0, maxLength).replace(/-+$/, '') + + return normalized + } + + private normalizeToSingleLine(text: string, maxLength: number): string { + const singleLine = text.replaceAll(/[\r\n]+/g, ' ').replaceAll(/\s+/g, ' ').trim() + if (singleLine.length > maxLength) return `${singleLine.slice(0, maxLength - 3)}...` + return singleLine + } +} diff --git a/sdk/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts b/sdk/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts new file mode 100644 index 00000000..aa49103c --- /dev/null +++ b/sdk/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts @@ -0,0 +1,68 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin, IDEKind} from './plugin-core' + +const IDEA_DIR = '.idea' +const CODE_STYLES_DIR = 'codeStyles' + +export class JetBrainsIDECodeStyleConfigOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('JetBrainsIDECodeStyleConfigOutputPlugin', { + cleanup: { + delete: { + project: { + files: ['.editorconfig', '.idea/codeStyles/Project.xml', '.idea/codeStyles/codeStyleConfig.xml', '.idea/.gitignore'] + } + } + }, + capabilities: {} + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {projects} = ctx.collectedOutputContext.workspace + const {jetbrainsConfigFiles, editorConfigFiles} = ctx.collectedOutputContext + const jetbrainsConfigs = [...jetbrainsConfigFiles ?? [], ...editorConfigFiles ?? []] + + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + for (const config of jetbrainsConfigs) { + const targetRelativePath = this.getTargetRelativePath(config) + declarations.push({ + path: this.resolvePath(projectDir.basePath, projectDir.path, targetRelativePath), + scope: 'project', + source: {content: config.content} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } + + private getTargetRelativePath(config: {type: IDEKind, dir: {path: string}}): string { + const sourcePath = config.dir.path + + if (config.type === IDEKind.EditorConfig) return '.editorconfig' + + if (config.type !== IDEKind.IntellijIDEA) return this.basename(sourcePath) + + const ideaIndex = sourcePath.indexOf(IDEA_DIR) + if (ideaIndex !== -1) return sourcePath.slice(Math.max(0, ideaIndex)) + return this.joinPath(IDEA_DIR, CODE_STYLES_DIR, this.basename(sourcePath)) + } +} diff --git a/sdk/src/plugins/OpencodeCLIOutputPlugin.test.ts b/sdk/src/plugins/OpencodeCLIOutputPlugin.test.ts new file mode 100644 index 00000000..ed51fbc8 --- /dev/null +++ b/sdk/src/plugins/OpencodeCLIOutputPlugin.test.ts @@ -0,0 +1,118 @@ +import type {OutputCleanContext, OutputWriteContext, SubAgentPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {OpencodeCLIOutputPlugin} from './OpencodeCLIOutputPlugin' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' + +class TestOpencodeCLIOutputPlugin extends OpencodeCLIOutputPlugin { + constructor(private readonly testHomeDir: string) { + super() + } + + protected override getHomeDir(): string { + return this.testHomeDir + } +} + +function createCleanContext(): OutputCleanContext { + return { + logger: createLogger('OpencodeCLIOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + runtimeTargets: {}, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Relative, + path: '.', + basePath: '.', + getDirectoryName: () => '.', + getAbsolutePath: () => path.resolve('.') + }, + projects: [] + } + } + } as unknown as OutputCleanContext +} + +function createSubAgentPrompt(scope: 'project' | 'global'): SubAgentPrompt { + return { + type: PromptKind.SubAgent, + content: 'subagent body', + length: 13, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'subagents/reviewer.mdx', + basePath: path.resolve('tmp/dist/subagents'), + getDirectoryName: () => 'reviewer', + getAbsolutePath: () => path.resolve('tmp/dist/subagents/reviewer.mdx') + }, + agentPrefix: 'ops', + agentName: 'reviewer', + canonicalName: 'ops-reviewer', + yamlFrontMatter: { + description: 'Reviewer', + scope, + namingCase: 'kebab-case' + }, + markdownContents: [] + } as unknown as SubAgentPrompt +} + +describe('opencodeCLIOutputPlugin synthetic workspace project output', () => { + it('writes project-scoped subagents into workspace root .opencode/agents via the synthetic workspace project', async () => { + const workspaceBase = path.resolve('tmp/opencode-workspace') + const plugin = new OpencodeCLIOutputPlugin() + const ctx = { + logger: createLogger('OpencodeCLIOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + runtimeTargets: {}, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + { + name: '__workspace__', + isWorkspaceRootProject: true + } + ] + }, + subAgents: [createSubAgentPrompt('project')] + } + } as unknown as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + + expect(declarations.map(declaration => declaration.path)).toContain(path.join(workspaceBase, '.opencode', 'agents', 'ops-reviewer.md')) + expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) + }) +}) + +describe('opencodeCLIOutputPlugin cleanup', () => { + it('keeps global opencode.json out of cleanup delete targets', async () => { + const tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-opencode-cleanup-')) + + try { + const plugin = new TestOpencodeCLIOutputPlugin(tempHomeDir) + const cleanup = await plugin.declareCleanupPaths(createCleanContext()) + const deletePaths = cleanup.delete?.map(target => target.path.replaceAll('\\', '/')) ?? [] + + expect(deletePaths).toContain(path.join(tempHomeDir, '.config', 'opencode', 'AGENTS.md').replaceAll('\\', '/')) + expect(deletePaths).not.toContain(path.join(tempHomeDir, '.config', 'opencode', 'opencode.json').replaceAll('\\', '/')) + } finally { + fs.rmSync(tempHomeDir, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/plugins/OpencodeCLIOutputPlugin.ts b/sdk/src/plugins/OpencodeCLIOutputPlugin.ts new file mode 100644 index 00000000..72f6564a --- /dev/null +++ b/sdk/src/plugins/OpencodeCLIOutputPlugin.ts @@ -0,0 +1,499 @@ +import type {CommandPrompt, OutputFileDeclaration, OutputWriteContext, RulePrompt, SkillPrompt, SubAgentPrompt} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterByProjectConfig, PLUGIN_NAMES} from './plugin-core' + +const GLOBAL_MEMORY_FILE = 'AGENTS.md' +const GLOBAL_CONFIG_DIR = '.config/opencode' +const OPENCODE_CONFIG_FILE = 'opencode.json' +const OPENCODE_RULES_PLUGIN_NAME = 'opencode-rules@latest' +const PROJECT_RULES_DIR = '.opencode' +const COMMANDS_SUBDIR = 'commands' +const AGENTS_SUBDIR = 'agents' +const SKILLS_SUBDIR = 'skills' +const RULES_SUBDIR = 'rules' + +type OpencodeOutputSource + = | {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'projectRootMemory', readonly content: string} + | {readonly kind: 'projectChildMemory', readonly content: string} + | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'subAgent', readonly agent: SubAgentPrompt} + | {readonly kind: 'skillMain', readonly skill: SkillPrompt, readonly normalizedSkillName: string} + | {readonly kind: 'skillReference', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'mcpConfig', readonly mcpServers: Record>} + | {readonly kind: 'rule', readonly rule: RulePrompt} + +function transformOpencodeCommandFrontMatter( + _cmd: CommandPrompt, + context: { + readonly sourceFrontMatter?: Record + } +): Record { + const frontMatter: Record = {} + const source = context.sourceFrontMatter + + if (source?.['description'] != null) frontMatter['description'] = source['description'] + if (source?.['agent'] != null) frontMatter['agent'] = source['agent'] + if (source?.['model'] != null) frontMatter['model'] = source['model'] + + if (source?.['allowTools'] != null && Array.isArray(source['allowTools'])) { + const tools: Record = {} + for (const tool of source['allowTools']) tools[String(tool)] = true + frontMatter['tools'] = tools + } + + for (const [key, value] of Object.entries(source ?? {})) { + if (!['description', 'agent', 'model', 'allowTools', 'namingCase', 'argumentHint'].includes(key)) frontMatter[key] = value + } + + return frontMatter +} + +export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('OpencodeCLIOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: GLOBAL_MEMORY_FILE, + treatWorkspaceRootProjectAsProject: true, + commands: { + subDir: COMMANDS_SUBDIR, + transformFrontMatter: transformOpencodeCommandFrontMatter + }, + subagents: { + subDir: AGENTS_SUBDIR + }, + skills: { + subDir: SKILLS_SUBDIR + }, + rules: { + subDir: RULES_SUBDIR, + prefix: 'rule', + sourceScopes: ['project', 'global'] + }, + cleanup: { + delete: { + project: { + files: [GLOBAL_MEMORY_FILE, '.opencode/opencode.json'], + dirs: ['.opencode/commands', '.opencode/agents', '.opencode/skills', '.opencode/rules'] + }, + global: { + files: ['.config/opencode/AGENTS.md'], + dirs: ['.config/opencode/commands', '.config/opencode/agents', '.config/opencode/skills', '.config/opencode/rules'] + }, + xdgConfig: { + files: ['opencode/AGENTS.md'], + dirs: ['opencode/commands', 'opencode/agents', 'opencode/skills', 'opencode/rules'] + } + } + }, + dependsOn: [PLUGIN_NAMES.AgentsOutput], + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + rules: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'global'], + singleScope: true + }, + subagents: { + scopes: ['project', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'global'], + singleScope: true + }, + mcp: { + scopes: ['project', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {globalMemory, commands, subAgents, skills, rules} = ctx.collectedOutputContext + const globalDir = this.getGlobalConfigDir() + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) + const promptProjects = this.getProjectPromptOutputProjects(ctx) + const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) + const selectedCommands + = commands != null + ? this.selectSingleScopeItems( + commands, + this.commandsConfig.sourceScopes, + command => this.resolveCommandSourceScope(command), + this.getTopicScopeOverride(ctx, 'commands') + ) + : {items: [] as readonly CommandPrompt[]} + const selectedSubAgents + = subAgents != null + ? this.selectSingleScopeItems( + subAgents, + this.subAgentsConfig.sourceScopes, + subAgent => this.resolveSubAgentSourceScope(subAgent), + this.getTopicScopeOverride(ctx, 'subagents') + ) + : {items: [] as readonly SubAgentPrompt[]} + const selectedSkills + = skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + const selectedMcpSkills + = skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'mcp') ?? this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + + if (globalMemory != null && activePromptScopes.has('global')) { + declarations.push({ + path: path.join(globalDir, GLOBAL_MEMORY_FILE), + scope: 'global', + source: { + kind: 'globalMemory', + content: globalMemory.content as string + } satisfies OpencodeOutputSource + }) + } + + const pushSkillDeclarations = (basePath: string, scope: 'project' | 'global', filteredSkills: readonly SkillPrompt[]): void => { + for (const skill of filteredSkills) { + const normalizedSkillName = this.validateAndNormalizeSkillName(this.getSkillName(skill)) + const skillDir = path.join(basePath, SKILLS_SUBDIR, normalizedSkillName) + + declarations.push({ + path: path.join(skillDir, 'SKILL.md'), + scope, + source: { + kind: 'skillMain', + skill, + normalizedSkillName + } satisfies OpencodeOutputSource + }) + + if (skill.childDocs != null) { + for (const refDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, refDoc.dir.path.replace(/\.mdx$/, '.md')), + scope, + source: { + kind: 'skillReference', + content: refDoc.content as string + } satisfies OpencodeOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope, + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies OpencodeOutputSource + }) + } + } + } + } + + const pushMcpDeclaration = (basePath: string, scope: 'project' | 'global', _filteredSkills: readonly SkillPrompt[]): void => { + void _filteredSkills + declarations.push({ + path: path.join(basePath, OPENCODE_CONFIG_FILE), + scope, + source: { + kind: 'mcpConfig', + mcpServers: {} + } satisfies OpencodeOutputSource + }) + } + + const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) + for (const project of promptProjects) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + + if (project.rootMemoryPrompt != null && activePromptScopes.has('project')) { + declarations.push({ + path: path.join(projectRootDir, GLOBAL_MEMORY_FILE), + scope: 'project', + source: { + kind: 'projectRootMemory', + content: project.rootMemoryPrompt.content as string + } satisfies OpencodeOutputSource + }) + } + + if (project.childMemoryPrompts != null && activePromptScopes.has('project')) { + for (const child of project.childMemoryPrompts) { + declarations.push({ + path: this.resolveFullPath(child.dir), + scope: 'project', + source: { + kind: 'projectChildMemory', + content: child.content as string + } satisfies OpencodeOutputSource + }) + } + } + } + + if ( + selectedCommands.selectedScope === 'project' + || selectedSubAgents.selectedScope === 'project' + || selectedSkills.selectedScope === 'project' + || selectedMcpSkills.selectedScope === 'project' + ) { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + const basePath = path.join(projectRootDir, PROJECT_RULES_DIR) + + const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') + if (selectedCommands.selectedScope === 'project') { + for (const command of filteredCommands) { + declarations.push({ + path: path.join(basePath, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), + scope: 'project', + source: {kind: 'command', command} satisfies OpencodeOutputSource + }) + } + } + + const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, project.projectConfig, 'subAgents') + if (selectedSubAgents.selectedScope === 'project') { + for (const agent of filteredSubAgents) { + declarations.push({ + path: path.join(basePath, AGENTS_SUBDIR, this.transformSubAgentName(agent)), + scope: 'project', + source: {kind: 'subAgent', agent} satisfies OpencodeOutputSource + }) + } + } + + const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') + if (selectedSkills.selectedScope === 'project') pushSkillDeclarations(basePath, 'project', filteredSkills) + + if (selectedMcpSkills.selectedScope === 'project') { + const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, project.projectConfig, 'skills') + pushMcpDeclaration(basePath, 'project', filteredMcpSkills) + } + } + } + + if (selectedCommands.selectedScope === 'global') { + const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') + for (const command of filteredCommands) { + declarations.push({ + path: path.join(globalDir, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), + scope: 'global', + source: {kind: 'command', command} satisfies OpencodeOutputSource + }) + } + } + + if (selectedSubAgents.selectedScope === 'global') { + const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, promptSourceProjectConfig, 'subAgents') + for (const agent of filteredSubAgents) { + declarations.push({ + path: path.join(globalDir, AGENTS_SUBDIR, this.transformSubAgentName(agent)), + scope: 'global', + source: {kind: 'subAgent', agent} satisfies OpencodeOutputSource + }) + } + } + + if (selectedSkills.selectedScope === 'global') { + const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') + pushSkillDeclarations(globalDir, 'global', filteredSkills) + } + + if (selectedMcpSkills.selectedScope === 'global') { + const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, promptSourceProjectConfig, 'skills') + pushMcpDeclaration(globalDir, 'global', filteredMcpSkills) + } + + // Keep opencode.json managed so the generated config can preserve user fields + // while normalizing the MCP section to an empty object. + + if (rules == null || rules.length === 0) return declarations + + const activeRuleScopes = this.selectRuleScopes(ctx, rules) + for (const ruleScope of activeRuleScopes) { + if (ruleScope === 'global') { + const globalRules = rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global') + for (const rule of globalRules) { + declarations.push({ + path: path.join(globalDir, RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'global', + source: {kind: 'rule', rule} satisfies OpencodeOutputSource + }) + } + } else if (ruleScope === 'project') { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + const basePath = path.join(projectRootDir, PROJECT_RULES_DIR) + + const projectRules = applySubSeriesGlobPrefix( + filterByProjectConfig( + rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'project'), + project.projectConfig, + 'rules' + ), + project.projectConfig + ) + for (const rule of projectRules) { + declarations.push({ + path: path.join(basePath, RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'project', + source: {kind: 'rule', rule} satisfies OpencodeOutputSource + }) + } + } + } + } + return declarations + } + + override async convertContent(declaration: OutputFileDeclaration, ctx: OutputWriteContext): Promise { + const source = declaration.source as OpencodeOutputSource + switch (source.kind) { + case 'globalMemory': + case 'projectRootMemory': + case 'projectChildMemory': + case 'skillReference': + return source.content + case 'command': + return this.buildCommandContent(source.command, ctx) + case 'subAgent': { + const frontMatter = this.buildOpencodeAgentFrontMatter(source.agent) + return this.buildMarkdownContent(source.agent.content, frontMatter, ctx) + } + case 'skillMain': { + const frontMatter = this.buildOpencodeSkillFrontMatter(source.skill, source.normalizedSkillName) + return this.buildMarkdownContent(source.skill.content as string, frontMatter, ctx) + } + case 'skillResource': + return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + case 'mcpConfig': + return JSON.stringify( + { + $schema: 'https://opencode.ai/config.json', + plugin: [OPENCODE_RULES_PLUGIN_NAME], + mcp: {} + }, + null, + 2 + ) + case 'rule': + return this.buildRuleContent(source.rule, ctx) + default: + throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + private buildOpencodeAgentFrontMatter(agent: SubAgentPrompt): Record { + const frontMatter: Record = {} + const source = agent.yamlFrontMatter as Record | undefined + + if (source?.['description'] != null) frontMatter['description'] = source['description'] + + frontMatter['mode'] = source?.['mode'] ?? 'subagent' + + if (source?.['model'] != null) frontMatter['model'] = source['model'] + if (source?.['temperature'] != null) frontMatter['temperature'] = source['temperature'] + if (source?.['maxSteps'] != null) frontMatter['maxSteps'] = source['maxSteps'] + if (source?.['hidden'] != null) frontMatter['hidden'] = source['hidden'] + + if (source?.['allowTools'] != null && Array.isArray(source['allowTools'])) { + const tools: Record = {} + for (const tool of source['allowTools']) tools[String(tool)] = true + frontMatter['tools'] = tools + } + + if (source?.['permission'] != null && typeof source['permission'] === 'object') frontMatter['permission'] = source['permission'] + + for (const [key, value] of Object.entries(source ?? {})) { + if (!['description', 'mode', 'model', 'temperature', 'maxSteps', 'hidden', 'allowTools', 'permission', 'namingCase', 'name', 'color'].includes(key)) { + frontMatter[key] = value + } + } + + return frontMatter + } + + private buildOpencodeSkillFrontMatter(skill: SkillPrompt, skillName: string): Record { + const frontMatter: Record = {} + const source = skill.yamlFrontMatter as Record | undefined + + frontMatter['name'] = skillName + if (source?.['description'] != null) frontMatter['description'] = source['description'] + + frontMatter['license'] = source?.['license'] ?? 'MIT' + frontMatter['compatibility'] = source?.['compatibility'] ?? 'opencode' + + const metadata: Record = {} + const metadataFields = ['author', 'version', 'keywords', 'category', 'repository', 'displayName'] + + for (const field of metadataFields) { + if (source?.[field] != null) metadata[field] = source[field] + } + + const reservedFields = new Set([ + 'name', + 'description', + 'license', + 'compatibility', + 'namingCase', + 'allowTools', + 'keywords', + 'displayName', + 'author', + 'version' + ]) + for (const [key, value] of Object.entries(source ?? {})) { + if (!reservedFields.has(key)) metadata[key] = value + } + + if (Object.keys(metadata).length > 0) frontMatter['metadata'] = metadata + + return frontMatter + } + + private validateAndNormalizeSkillName(name: string): string { + let normalized = name.toLowerCase() + normalized = normalized.replaceAll(/[^a-z0-9-]+/g, '-') + normalized = normalized.replaceAll(/-+/g, '-') + normalized = normalized.replaceAll(/^-|-$/g, '') + + if (normalized.length === 0) normalized = 'skill' + else if (normalized.length > 64) { + normalized = normalized.slice(0, 64) + normalized = normalized.replace(/-$/, '') + } + + return normalized + } +} diff --git a/sdk/src/plugins/PromptMarkdownCleanup.test.ts b/sdk/src/plugins/PromptMarkdownCleanup.test.ts new file mode 100644 index 00000000..032e5797 --- /dev/null +++ b/sdk/src/plugins/PromptMarkdownCleanup.test.ts @@ -0,0 +1,259 @@ +import type { + OutputCleanContext, + OutputPlugin, + ProjectChildrenMemoryPrompt, + ProjectRootMemoryPrompt +} from './plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {collectDeletionTargets} from '../commands/CleanupUtils' +import {AgentsOutputPlugin} from './AgentsOutputPlugin' +import {ClaudeCodeCLIOutputPlugin} from './ClaudeCodeCLIOutputPlugin' +import {GeminiCLIOutputPlugin} from './GeminiCLIOutputPlugin' +import {FilePathKind, PromptKind} from './plugin-core' + +interface CleanupTestCase { + readonly name: string + readonly fileName: string + readonly createPlugin: () => OutputPlugin +} + +const TEST_CASES: readonly CleanupTestCase[] = [ + { + name: 'AgentsOutputPlugin', + fileName: 'AGENTS.md', + createPlugin: () => new AgentsOutputPlugin() + }, + { + name: 'ClaudeCodeCLIOutputPlugin', + fileName: 'CLAUDE.md', + createPlugin: () => new ClaudeCodeCLIOutputPlugin() + }, + { + name: 'GeminiCLIOutputPlugin', + fileName: 'GEMINI.md', + createPlugin: () => new GeminiCLIOutputPlugin() + } +] + +function createRootPrompt(content: string): ProjectRootMemoryPrompt { + return { + type: PromptKind.ProjectRootMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + }, + markdownContents: [] + } as ProjectRootMemoryPrompt +} + +function createChildPrompt( + workspaceDir: string, + projectName: string, + relativePath: string, + content: string +): ProjectChildrenMemoryPrompt { + return { + type: PromptKind.ProjectChildrenMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + markdownContents: [], + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceDir, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceDir, projectName, relativePath) + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceDir, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceDir, projectName, relativePath) + } + } as ProjectChildrenMemoryPrompt +} + +function createCleanContext(workspaceDir: string): OutputCleanContext { + return { + logger: { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + }, + fs, + path, + glob, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [ + { + name: '__workspace__', + isWorkspaceRootProject: true, + rootMemoryPrompt: createRootPrompt('workspace root') + }, + { + name: 'aindex', + isPromptSourceProject: true, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'aindex', + basePath: workspaceDir, + getDirectoryName: () => 'aindex', + getAbsolutePath: () => path.join(workspaceDir, 'aindex') + }, + rootMemoryPrompt: createRootPrompt('prompt-source root'), + childMemoryPrompts: [ + createChildPrompt( + workspaceDir, + 'aindex', + 'commands', + 'prompt-source child' + ) + ] + }, + { + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + }, + rootMemoryPrompt: createRootPrompt('project root'), + childMemoryPrompts: [ + createChildPrompt( + workspaceDir, + 'project-a', + 'commands', + 'project child' + ) + ] + } + ] + } + } + } as OutputCleanContext +} + +describe.each(TEST_CASES)('$name cleanup', ({fileName, createPlugin}) => { + it('cleans workspace and non-prompt project markdown outputs without touching prompt-source paths', async () => { + const tempDir = fs.mkdtempSync( + path.join(os.tmpdir(), `tnmsc-${fileName.toLowerCase()}-cleanup-`) + ) + const workspaceDir = path.join(tempDir, 'workspace') + const workspaceFile = path.join(workspaceDir, fileName) + const promptSourceRootFile = path.join(workspaceDir, 'aindex', fileName) + const promptSourceChildFile = path.join( + workspaceDir, + 'aindex', + 'commands', + fileName + ) + const projectRootFile = path.join(workspaceDir, 'project-a', fileName) + const projectChildFile = path.join( + workspaceDir, + 'project-a', + 'commands', + fileName + ) + const manualProjectChildFile = path.join( + workspaceDir, + 'project-a', + 'docs', + fileName + ) + + fs.mkdirSync(path.dirname(promptSourceChildFile), {recursive: true}) + fs.mkdirSync(path.dirname(manualProjectChildFile), {recursive: true}) + fs.mkdirSync(path.dirname(projectChildFile), {recursive: true}) + fs.writeFileSync(workspaceFile, '# workspace', 'utf8') + fs.writeFileSync(promptSourceRootFile, '# prompt-source root', 'utf8') + fs.writeFileSync(promptSourceChildFile, '# prompt-source child', 'utf8') + fs.writeFileSync(projectRootFile, '# project root', 'utf8') + fs.writeFileSync(projectChildFile, '# project child', 'utf8') + fs.writeFileSync(manualProjectChildFile, '# manual child', 'utf8') + + try { + const result = await collectDeletionTargets( + [createPlugin()], + createCleanContext(workspaceDir) + ) + const normalizedFilesToDelete = result.filesToDelete.map(target => + target.replaceAll('\\', '/')) + + expect(normalizedFilesToDelete).toEqual( + expect.arrayContaining([ + workspaceFile.replaceAll('\\', '/'), + projectRootFile.replaceAll('\\', '/'), + projectChildFile.replaceAll('\\', '/'), + manualProjectChildFile.replaceAll('\\', '/') + ]) + ) + expect(normalizedFilesToDelete).not.toContain( + promptSourceRootFile.replaceAll('\\', '/') + ) + expect(normalizedFilesToDelete).not.toContain( + promptSourceChildFile.replaceAll('\\', '/') + ) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) + +describe('claudeCodeCLIOutputPlugin cleanup', () => { + it('keeps project-scope .claude cleanup directories registered', async () => { + const tempDir = fs.mkdtempSync( + path.join(os.tmpdir(), 'tnmsc-claude-cleanup-') + ) + const workspaceDir = path.join(tempDir, 'workspace') + const projectClaudeDirs = [ + path.join(workspaceDir, 'project-a', '.claude', 'rules'), + path.join(workspaceDir, 'project-a', '.claude', 'commands'), + path.join(workspaceDir, 'project-a', '.claude', 'agents'), + path.join(workspaceDir, 'project-a', '.claude', 'skills') + ] + + for (const directory of projectClaudeDirs) { + fs.mkdirSync(directory, {recursive: true}) + } + + try { + const result = await collectDeletionTargets( + [new ClaudeCodeCLIOutputPlugin()], + createCleanContext(workspaceDir) + ) + const normalizedDirsToDelete = result.dirsToDelete.map(target => + target.replaceAll('\\', '/')) + + expect(normalizedDirsToDelete).toEqual( + expect.arrayContaining( + projectClaudeDirs.map(target => target.replaceAll('\\', '/')) + ) + ) + } finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/plugins/QoderIDEPluginOutputPlugin.test.ts b/sdk/src/plugins/QoderIDEPluginOutputPlugin.test.ts new file mode 100644 index 00000000..9ab63746 --- /dev/null +++ b/sdk/src/plugins/QoderIDEPluginOutputPlugin.test.ts @@ -0,0 +1,396 @@ +import type {CommandPrompt, GlobalMemoryPrompt, OutputWriteContext, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt, RulePrompt, SkillPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' +import {QoderIDEPluginOutputPlugin} from './QoderIDEPluginOutputPlugin' + +class TestQoderIDEPluginOutputPlugin extends QoderIDEPluginOutputPlugin { + constructor(private readonly testHomeDir: string) { + super() + } + + protected override getHomeDir(): string { + return this.testHomeDir + } +} + +function createWorkspaceRootPrompt(): ProjectRootMemoryPrompt { + return { + type: PromptKind.ProjectRootMemory, + content: 'workspace root prompt', + length: 21, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + }, + markdownContents: [] + } as ProjectRootMemoryPrompt +} + +function createProjectRootPrompt(content: string): ProjectRootMemoryPrompt { + return { + type: PromptKind.ProjectRootMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + }, + markdownContents: [] + } as ProjectRootMemoryPrompt +} + +function createChildPrompt(relativePath: string, content: string): ProjectChildrenMemoryPrompt { + return { + type: PromptKind.ProjectChildrenMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + markdownContents: [], + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.resolve('tmp/qoder-dist/app'), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.resolve('tmp/qoder-dist/app', relativePath) + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.resolve('tmp/qoder-workspace/project'), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.resolve('tmp/qoder-workspace/project', relativePath) + } + } as ProjectChildrenMemoryPrompt +} + +function createGlobalMemoryPrompt(): GlobalMemoryPrompt { + return { + type: PromptKind.GlobalMemory, + content: 'global prompt', + length: 13, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'aindex/dist/global.mdx', + basePath: path.resolve('.'), + getDirectoryName: () => 'dist', + getAbsolutePath: () => path.resolve('aindex/dist/global.mdx') + }, + markdownContents: [] + } as GlobalMemoryPrompt +} + +function createCommandPrompt(): CommandPrompt { + return { + type: PromptKind.Command, + content: 'command body', + length: 12, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'commands/dev/build.mdx', + basePath: path.resolve('tmp/dist/commands'), + getDirectoryName: () => 'dev', + getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') + }, + commandPrefix: 'dev', + commandName: 'build', + yamlFrontMatter: { + description: 'Build command', + scope: 'project' + }, + markdownContents: [] + } as CommandPrompt +} + +function createSkillPrompt( + scope: 'project' | 'global' = 'project', + name: string = 'ship-it' +): SkillPrompt { + return { + type: PromptKind.Skill, + content: 'skill body', + length: 10, + filePathKind: FilePathKind.Relative, + skillName: name, + dir: { + pathKind: FilePathKind.Relative, + path: `skills/${name}`, + basePath: path.resolve('tmp/dist/skills'), + getDirectoryName: () => name, + getAbsolutePath: () => path.resolve('tmp/dist/skills', name) + }, + yamlFrontMatter: { + description: 'Skill description', + scope + }, + mcpConfig: { + type: PromptKind.SkillMcpConfig, + mcpServers: { + inspector: { + command: 'npx', + args: ['inspector'] + } + }, + rawContent: '{"mcpServers":{"inspector":{"command":"npx","args":["inspector"]}}}' + }, + markdownContents: [] + } as SkillPrompt +} + +function createRulePrompt(scope: 'project' | 'global' = 'project'): RulePrompt { + return { + type: PromptKind.Rule, + content: 'rule body', + length: 9, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'rules/ops/guard.mdx', + basePath: path.resolve('tmp/dist/rules'), + getDirectoryName: () => 'ops', + getAbsolutePath: () => path.resolve('tmp/dist/rules/ops/guard.mdx') + }, + prefix: 'ops', + ruleName: 'guard', + globs: ['src/**'], + scope, + markdownContents: [] + } as RulePrompt +} + +describe('qoderIDEPluginOutputPlugin synthetic workspace project output', () => { + it('writes workspace-root prompt, rules, commands, and skills through the synthetic workspace project', async () => { + const workspaceBase = path.resolve('tmp/qoder-workspace') + const plugin = new QoderIDEPluginOutputPlugin() + const ctx = { + logger: createLogger('QoderIDEPluginOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [{ + name: '__workspace__', + isWorkspaceRootProject: true, + rootMemoryPrompt: createWorkspaceRootPrompt() + }] + }, + commands: [createCommandPrompt()], + skills: [createSkillPrompt()], + rules: [createRulePrompt('project')] + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(workspaceBase, '.qoder', 'rules', 'always.md')) + expect(paths).toContain(path.join(workspaceBase, '.qoder', 'rules', 'rule-ops-guard.md')) + expect(paths).toContain(path.join(workspaceBase, '.qoder', 'commands', 'dev-build.md')) + expect(paths).toContain(path.join(workspaceBase, '.qoder', 'skills', 'ship-it', 'SKILL.md')) + expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) + }) + + it('skips prompt-derived rules for the prompt-source project but still keeps real project rules', async () => { + const workspaceBase = path.resolve('tmp/qoder-prompt-source') + const plugin = new QoderIDEPluginOutputPlugin() + const ctx = { + logger: createLogger('QoderIDEPluginOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + { + name: 'aindex', + isPromptSourceProject: true, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'aindex', + basePath: workspaceBase, + getDirectoryName: () => 'aindex', + getAbsolutePath: () => path.join(workspaceBase, 'aindex') + }, + rootMemoryPrompt: createProjectRootPrompt('prompt-source root'), + childMemoryPrompts: [createChildPrompt('commands', 'prompt-source child')] + }, + { + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceBase, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceBase, 'project-a') + }, + rootMemoryPrompt: createProjectRootPrompt('project root'), + childMemoryPrompts: [createChildPrompt('commands', 'project child')] + } + ] + }, + globalMemory: createGlobalMemoryPrompt(), + rules: [createRulePrompt('project')] + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).not.toContain(path.join(workspaceBase, 'aindex', '.qoder', 'rules', 'global.md')) + expect(paths).not.toContain(path.join(workspaceBase, 'aindex', '.qoder', 'rules', 'always.md')) + expect(paths).not.toContain(path.join(workspaceBase, 'aindex', '.qoder', 'rules', 'glob-commands.md')) + expect(paths).toContain(path.join(workspaceBase, 'aindex', '.qoder', 'rules', 'rule-ops-guard.md')) + expect(paths).toContain(path.join(workspaceBase, 'project-a', '.qoder', 'rules', 'global.md')) + expect(paths).toContain(path.join(workspaceBase, 'project-a', '.qoder', 'rules', 'always.md')) + expect(paths).toContain(path.join(workspaceBase, 'project-a', '.qoder', 'rules', 'glob-commands.md')) + expect(paths).toContain(path.join(workspaceBase, 'project-a', '.qoder', 'rules', 'rule-ops-guard.md')) + }) + + it('keeps skill files global when only mcp is project-scoped', async () => { + const workspaceBase = path.resolve('tmp/qoder-split-scope-project-mcp') + const homeDir = path.join(workspaceBase, 'home') + const plugin = new TestQoderIDEPluginOutputPlugin(homeDir) + const ctx = { + logger: createLogger('QoderIDEPluginOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + pluginOptions: { + outputScopes: { + plugins: { + QoderIDEPluginOutputPlugin: { + skills: 'global', + mcp: 'project' + } + } + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [{ + name: '__workspace__', + isWorkspaceRootProject: true + }] + }, + skills: [ + createSkillPrompt('project', 'inspect-locally'), + createSkillPrompt('global', 'ship-it') + ] + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(homeDir, '.qoder', 'skills', 'ship-it', 'SKILL.md')) + expect(paths).toContain(path.join(workspaceBase, '.qoder', 'skills', 'inspect-locally', 'mcp.json')) + expect(paths).not.toContain(path.join(workspaceBase, '.qoder', 'skills', 'ship-it', 'SKILL.md')) + expect(paths).not.toContain(path.join(homeDir, '.qoder', 'skills', 'inspect-locally', 'SKILL.md')) + }) + + it('keeps skill files project-scoped when only mcp is global-scoped', async () => { + const workspaceBase = path.resolve('tmp/qoder-split-scope-global-mcp') + const homeDir = path.join(workspaceBase, 'home') + const plugin = new TestQoderIDEPluginOutputPlugin(homeDir) + const ctx = { + logger: createLogger('QoderIDEPluginOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + pluginOptions: { + outputScopes: { + plugins: { + QoderIDEPluginOutputPlugin: { + skills: 'project', + mcp: 'global' + } + } + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [{ + name: '__workspace__', + isWorkspaceRootProject: true + }] + }, + skills: [ + createSkillPrompt('project', 'ship-it'), + createSkillPrompt('global', 'inspect-globally') + ] + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(workspaceBase, '.qoder', 'skills', 'ship-it', 'SKILL.md')) + expect(paths).toContain(path.join(homeDir, '.qoder', 'skills', 'inspect-globally', 'mcp.json')) + expect(paths).not.toContain(path.join(homeDir, '.qoder', 'skills', 'ship-it', 'SKILL.md')) + expect(paths).not.toContain(path.join(workspaceBase, '.qoder', 'skills', 'inspect-globally', 'SKILL.md')) + }) + + it('writes the global prompt to workspace root through the synthetic workspace project', async () => { + const workspaceBase = path.resolve('tmp/qoder-workspace-global-prompt') + const plugin = new QoderIDEPluginOutputPlugin() + const ctx = { + logger: createLogger('QoderIDEPluginOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [{ + name: '__workspace__', + isWorkspaceRootProject: true + }] + }, + globalMemory: createGlobalMemoryPrompt() + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + + expect(declarations.map(declaration => declaration.path)).toContain( + path.join(workspaceBase, '.qoder', 'rules', 'global.md') + ) + }) +}) diff --git a/sdk/src/plugins/QoderIDEPluginOutputPlugin.ts b/sdk/src/plugins/QoderIDEPluginOutputPlugin.ts new file mode 100644 index 00000000..0ae4d6ad --- /dev/null +++ b/sdk/src/plugins/QoderIDEPluginOutputPlugin.ts @@ -0,0 +1,419 @@ +import type { + CommandPrompt, + OutputFileDeclaration, + OutputWriteContext, + ProjectChildrenMemoryPrompt, + RulePrompt, + RuleScope, + SkillPrompt +} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterByProjectConfig} from './plugin-core' + +const QODER_CONFIG_DIR = '.qoder' +const RULES_SUBDIR = 'rules' +const COMMANDS_SUBDIR = 'commands' +const SKILLS_SUBDIR = 'skills' +const GLOBAL_RULE_FILE = 'global.md' +const PROJECT_RULE_FILE = 'always.md' +const CHILD_RULE_FILE_PREFIX = 'glob-' +const SKILL_FILE_NAME = 'SKILL.md' +const MCP_CONFIG_FILE = 'mcp.json' +const TRIGGER_ALWAYS = 'always_on' +const TRIGGER_GLOB = 'glob' +const RULE_GLOB_KEY = 'glob' +const RULE_FILE_PREFIX = 'rule-' + +type QoderOutputSource + = | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'ruleContent', readonly content: string} + | {readonly kind: 'rulePrompt', readonly rule: RulePrompt} + | {readonly kind: 'skillMain', readonly skill: SkillPrompt} + | {readonly kind: 'skillMcpConfig', readonly rawContent: string} + | {readonly kind: 'skillChildDoc', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'ignoreFile', readonly content: string} + +function transformQoderCommandFrontMatter( + _cmd: CommandPrompt, + context: { + readonly sourceFrontMatter?: Record + } +): Record { + const source = context.sourceFrontMatter + + const frontMatter: Record = { + description: 'Fast command', + type: 'user_command' + } + + if (source?.['description'] != null) frontMatter['description'] = source['description'] + if (source?.['argumentHint'] != null) frontMatter['argumentHint'] = source['argumentHint'] + if (source?.['allowTools'] != null && Array.isArray(source['allowTools']) && source['allowTools'].length > 0) frontMatter['allowTools'] = source['allowTools'] + + return frontMatter +} + +export class QoderIDEPluginOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('QoderIDEPluginOutputPlugin', { + globalConfigDir: QODER_CONFIG_DIR, + treatWorkspaceRootProjectAsProject: true, + indexignore: '.qoderignore', + commands: { + subDir: COMMANDS_SUBDIR, + transformFrontMatter: transformQoderCommandFrontMatter + }, + skills: { + subDir: SKILLS_SUBDIR + }, + rules: { + subDir: RULES_SUBDIR, + sourceScopes: ['project', 'global'] + }, + cleanup: { + delete: { + project: { + dirs: ['.qoder/commands', '.qoder/rules', '.qoder/skills'] + }, + global: { + dirs: ['.qoder/commands', '.qoder/rules', '.qoder/skills'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + rules: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'global'], + singleScope: true + }, + mcp: { + scopes: ['project', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {globalMemory, commands, skills, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const globalDir = this.getGlobalConfigDir() + const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) + const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) + const activeRuleScopes = new Set(rules != null ? this.selectRuleScopes(ctx, rules) : []) + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) + const promptProjects = this.getProjectPromptOutputProjects(ctx) + const selectedCommands = commands != null + ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, command => this.resolveCommandSourceScope(command), this.getTopicScopeOverride(ctx, 'commands')) + : {items: [] as readonly CommandPrompt[]} + const selectedSkills = skills != null + ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) + : {items: [] as readonly SkillPrompt[]} + const selectedMcpSkills = skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'mcp') ?? this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + + const pushSkillDeclarations = ( + baseDir: string, + scope: 'project' | 'global', + filteredSkills: readonly SkillPrompt[] + ): void => { + for (const skill of filteredSkills) { + const skillName = this.getSkillName(skill) + const skillDir = path.join(baseDir, SKILLS_SUBDIR, skillName) + declarations.push({ + path: path.join(skillDir, SKILL_FILE_NAME), + scope, + source: {kind: 'skillMain', skill} satisfies QoderOutputSource + }) + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), + scope, + source: { + kind: 'skillChildDoc', + content: childDoc.content as string + } satisfies QoderOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope, + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies QoderOutputSource + }) + } + } + } + } + + const pushSkillMcpDeclarations = ( + baseDir: string, + scope: 'project' | 'global', + filteredMcpSkills: readonly SkillPrompt[] + ): void => { + for (const skill of filteredMcpSkills) { + if (skill.mcpConfig == null) continue + + const skillDir = path.join(baseDir, SKILLS_SUBDIR, this.getSkillName(skill)) + declarations.push({ + path: path.join(skillDir, MCP_CONFIG_FILE), + scope, + source: { + kind: 'skillMcpConfig', + rawContent: skill.mcpConfig.rawContent + } satisfies QoderOutputSource + }) + } + } + + if (selectedCommands.selectedScope === 'project') { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectBase = this.resolveProjectConfigDir(ctx, project) + if (projectBase == null) continue + + const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') + for (const command of filteredCommands) { + declarations.push({ + path: path.join(projectBase, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), + scope: 'project', + source: {kind: 'command', command} satisfies QoderOutputSource + }) + } + } + } + + if (selectedCommands.selectedScope === 'global') { + const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') + for (const command of filteredCommands) { + declarations.push({ + path: path.join(globalDir, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), + scope: 'global', + source: {kind: 'command', command} satisfies QoderOutputSource + }) + } + } + + if (selectedSkills.selectedScope === 'project' || selectedMcpSkills.selectedScope === 'project') { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectBase = this.resolveProjectConfigDir(ctx, project) + if (projectBase == null) continue + + if (selectedSkills.selectedScope === 'project') { + const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') + pushSkillDeclarations(projectBase, 'project', filteredSkills) + } + + if (selectedMcpSkills.selectedScope === 'project') { + const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, project.projectConfig, 'skills') + pushSkillMcpDeclarations(projectBase, 'project', filteredMcpSkills) + } + } + } + + if (selectedSkills.selectedScope === 'global' || selectedMcpSkills.selectedScope === 'global') { + if (selectedSkills.selectedScope === 'global') { + const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') + pushSkillDeclarations(globalDir, 'global', filteredSkills) + } + + if (selectedMcpSkills.selectedScope === 'global') { + const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, promptSourceProjectConfig, 'skills') + pushSkillMcpDeclarations(globalDir, 'global', filteredMcpSkills) + } + } + + if (globalMemory != null && activePromptScopes.has('global')) { + for (const project of promptProjects) { + const projectBase = this.resolveProjectConfigDir(ctx, project) + if (projectBase == null) continue + declarations.push({ + path: path.join(projectBase, RULES_SUBDIR, GLOBAL_RULE_FILE), + scope: 'project', + source: { + kind: 'ruleContent', + content: this.buildAlwaysRuleContent(globalMemory.content as string, ctx) + } satisfies QoderOutputSource + }) + } + } + + if (activePromptScopes.has('project')) { + for (const project of promptProjects) { + const projectBase = this.resolveProjectConfigDir(ctx, project) + if (projectBase == null) continue + + if (project.rootMemoryPrompt != null) { + declarations.push({ + path: path.join(projectBase, RULES_SUBDIR, PROJECT_RULE_FILE), + scope: 'project', + source: { + kind: 'ruleContent', + content: this.buildAlwaysRuleContent(project.rootMemoryPrompt.content as string, ctx) + } satisfies QoderOutputSource + }) + } + + if (project.childMemoryPrompts != null) { + for (const child of project.childMemoryPrompts) { + declarations.push({ + path: path.join(projectBase, RULES_SUBDIR, this.buildChildRuleFileName(child)), + scope: 'project', + source: { + kind: 'ruleContent', + content: this.buildGlobRuleContent(child, ctx) + } satisfies QoderOutputSource + }) + } + } + } + } + + if (rules != null && rules.length > 0 && activeRuleScopes.has('project')) { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectBase = this.resolveProjectConfigDir(ctx, project) + if (projectBase == null) continue + + const projectRules = applySubSeriesGlobPrefix( + filterByProjectConfig(rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'project'), project.projectConfig, 'rules'), + project.projectConfig + ) + for (const rule of projectRules) { + declarations.push({ + path: path.join(projectBase, RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'project', + source: {kind: 'rulePrompt', rule} satisfies QoderOutputSource + }) + } + } + } + + if (rules != null && rules.length > 0 && activeRuleScopes.has('global')) { + const globalRules = rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global') + for (const rule of globalRules) { + declarations.push({ + path: path.join(globalDir, RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'global', + source: {kind: 'rulePrompt', rule} satisfies QoderOutputSource + }) + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of this.getConcreteProjects(ctx)) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: { + kind: 'ignoreFile', + content: ignoreFile.content + } satisfies QoderOutputSource + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + const source = declaration.source as QoderOutputSource + switch (source.kind) { + case 'command': return this.buildCommandContent(source.command, ctx) + case 'ruleContent': return source.content + case 'rulePrompt': return this.buildRuleContent(source.rule, ctx) + case 'skillMain': { + const fmData = this.buildSkillFrontMatter(source.skill) + return this.buildMarkdownContent(source.skill.content as string, fmData, ctx) + } + case 'skillMcpConfig': return source.rawContent + case 'skillChildDoc': + case 'ignoreFile': return source.content + case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + private buildChildRuleFileName(child: ProjectChildrenMemoryPrompt): string { + const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path + const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '').replaceAll('/', '-') + return `${CHILD_RULE_FILE_PREFIX}${normalized.length > 0 ? normalized : 'root'}.md` + } + + private buildAlwaysRuleContent(content: string, ctx: OutputWriteContext): string { + return this.buildMarkdownContent(content, {trigger: TRIGGER_ALWAYS, type: 'user_command'}, ctx) + } + + private buildGlobRuleContent(child: ProjectChildrenMemoryPrompt, ctx: OutputWriteContext): string { + const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path + const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '') + const pattern = normalized.length === 0 ? '**/*' : `${normalized}/**` + return this.buildMarkdownContent(child.content as string, {trigger: TRIGGER_GLOB, [RULE_GLOB_KEY]: pattern, type: 'user_command'}, ctx) + } + + protected override buildSkillFrontMatter(skill: SkillPrompt): Record { + const fm = skill.yamlFrontMatter + return { + name: this.getSkillName(skill), + description: fm.description, + type: 'user_command', + ...fm.displayName != null && {displayName: fm.displayName}, + ...fm.keywords != null && fm.keywords.length > 0 && {keywords: fm.keywords}, + ...fm.author != null && {author: fm.author}, + ...fm.version != null && {version: fm.version}, + ...fm.allowTools != null && fm.allowTools.length > 0 && {allowTools: fm.allowTools} + } + } + + protected override buildRuleFileName(rule: RulePrompt, prefix: string = RULE_FILE_PREFIX): string { + return `${prefix}${rule.prefix}-${rule.ruleName}.md` + } + + protected override buildRuleContent(rule: RulePrompt, ctx?: OutputWriteContext): string { + const fmData: Record = { + trigger: TRIGGER_GLOB, + [RULE_GLOB_KEY]: rule.globs.length > 0 ? rule.globs.join(', ') : '**/*', + type: 'user_command' + } + return this.buildMarkdownContent(rule.content, fmData, ctx) + } + + protected override normalizeRuleScope(rule: RulePrompt): RuleScope { + return rule.scope ?? 'global' + } +} diff --git a/sdk/src/plugins/ReadmeMdConfigFileOutputPlugin.ts b/sdk/src/plugins/ReadmeMdConfigFileOutputPlugin.ts new file mode 100644 index 00000000..0ae873d9 --- /dev/null +++ b/sdk/src/plugins/ReadmeMdConfigFileOutputPlugin.ts @@ -0,0 +1,72 @@ +import type { + OutputFileDeclaration, + OutputWriteContext, + ReadmeFileKind +} from './plugin-core' + +import * as path from 'node:path' +import {AbstractOutputPlugin, README_FILE_KIND_MAP} from './plugin-core' + +function resolveOutputFileName(fileKind?: ReadmeFileKind): string { + return README_FILE_KIND_MAP[fileKind ?? 'Readme'].out +} + +/** + * Output plugin for writing readme-family files to project directories. + * Reads README prompts collected by ReadmeMdInputCapability and writes them + * to the corresponding project directories. + * + * Output mapping: + * - fileKind=Readme → README.md + * - fileKind=CodeOfConduct → CODE_OF_CONDUCT.md + * - fileKind=Security → SECURITY.md + * + * Supports: + * - Root files (written to project root) + * - Child files (written to project subdirectories) + * - Dry-run mode (preview without writing) + * - Clean operation (delete generated files) + */ +export class ReadmeMdConfigFileOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('ReadmeMdConfigFileOutputPlugin', { + outputFileName: 'README.md', + cleanup: { + delete: { + project: { + files: ['README.md', 'CODE_OF_CONDUCT.md', 'SECURITY.md'] + } + } + }, + capabilities: {} + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {readmePrompts} = ctx.collectedOutputContext + if (readmePrompts == null || readmePrompts.length === 0) return declarations + + for (const readme of readmePrompts) { + const outputFileName = resolveOutputFileName(readme.fileKind) + const filePath = path.join(readme.targetDir.basePath, readme.targetDir.path, outputFileName) + declarations.push({ + path: filePath, + scope: 'project', + source: {content: readme.content as string} + }) + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } +} diff --git a/sdk/src/plugins/TraeCNIDEOutputPlugin.ts b/sdk/src/plugins/TraeCNIDEOutputPlugin.ts new file mode 100644 index 00000000..751242c7 --- /dev/null +++ b/sdk/src/plugins/TraeCNIDEOutputPlugin.ts @@ -0,0 +1,60 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin} from './plugin-core' + +const GLOBAL_MEMORY_FILE = 'GLOBAL.md' +const GLOBAL_CONFIG_DIR = '.trae-cn' +const USER_RULES_SUBDIR = 'user_rules' + +export class TraeCNIDEOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('TraeCNIDEOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: GLOBAL_MEMORY_FILE, + dependsOn: ['TraeIDEOutputPlugin'], + cleanup: { + delete: { + global: { + dirs: ['.trae-cn/user_rules'] + } + } + }, + capabilities: { + prompt: { + scopes: ['global'], + singleScope: false + } + } + }) + } + + private getGlobalUserRulesDir(): string { + return this.joinPath(this.getGlobalConfigDir(), USER_RULES_SUBDIR) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['global'])) + if (!activePromptScopes.has('global')) return [] + + const {globalMemory} = ctx.collectedOutputContext + if (globalMemory == null) return [] + + return [{ + path: this.joinPath(this.getGlobalUserRulesDir(), GLOBAL_MEMORY_FILE), + scope: 'global', + source: {content: globalMemory.content as string} + }] + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } +} diff --git a/sdk/src/plugins/TraeIDEOutputPlugin.test.ts b/sdk/src/plugins/TraeIDEOutputPlugin.test.ts new file mode 100644 index 00000000..54835979 --- /dev/null +++ b/sdk/src/plugins/TraeIDEOutputPlugin.test.ts @@ -0,0 +1,125 @@ +import type {OutputWriteContext, ProjectChildrenMemoryPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' +import {TraeIDEOutputPlugin} from './TraeIDEOutputPlugin' + +function createChildPrompt(relativePath: string, content: string): ProjectChildrenMemoryPrompt { + return { + type: PromptKind.ProjectChildrenMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + markdownContents: [], + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.resolve('tmp/dist/app'), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.resolve('tmp/dist/app', relativePath) + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.resolve('tmp/workspace/project'), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.resolve('tmp/workspace/project', relativePath) + } + } as ProjectChildrenMemoryPrompt +} + +describe('traeIDEOutputPlugin steering rule output', () => { + it('emits project-relative glob and injects output-dir scope guard', async () => { + const plugin = new TraeIDEOutputPlugin() + const workspaceBase = path.resolve('tmp/trae-plugin-test') + const ctx = { + logger: createLogger('TraeIDEOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + { + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceBase, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceBase, 'project-a') + }, + childMemoryPrompts: [createChildPrompt('commands', 'Rule body')] + } + ] + } + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const steering = declarations.find(d => d.source != null && (d.source as {kind?: string}).kind === 'steeringRule') + expect(steering).toBeDefined() + if (steering == null) throw new Error('Expected steering declaration') + + const {content} = steering.source as {content: string} + expect(content).toContain('globs: commands/**') + expect(content).toContain('Scope guard: this rule is for the project-root path "commands/" only.') + expect(content).toContain('Do not apply this rule to generated output paths such as "dist/commands/"') + expect(content).toContain('globs: commands/**\n---\n\nScope guard:') + }) + + it('honors frontMatter.blankLineAfter=false for prebuilt steering rule content', async () => { + const plugin = new TraeIDEOutputPlugin() + const workspaceBase = path.resolve('tmp/trae-plugin-test-no-blank-line') + const ctx = { + logger: createLogger('TraeIDEOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + pluginOptions: { + frontMatter: { + blankLineAfter: false + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + { + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceBase, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceBase, 'project-a') + }, + childMemoryPrompts: [createChildPrompt('commands', 'Rule body')] + } + ] + } + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const steering = declarations.find(d => d.source != null && (d.source as {kind?: string}).kind === 'steeringRule') + expect(steering).toBeDefined() + if (steering == null) throw new Error('Expected steering declaration') + + const {content} = steering.source as {content: string} + expect(content).toContain('---\nScope guard:') + expect(content).not.toContain('---\n\nScope guard:') + }) +}) diff --git a/sdk/src/plugins/TraeIDEOutputPlugin.ts b/sdk/src/plugins/TraeIDEOutputPlugin.ts new file mode 100644 index 00000000..691cea41 --- /dev/null +++ b/sdk/src/plugins/TraeIDEOutputPlugin.ts @@ -0,0 +1,295 @@ +import type { + CommandPrompt, + OutputFileDeclaration, + OutputWriteContext, + ProjectChildrenMemoryPrompt, + SkillPrompt +} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import {AbstractOutputPlugin, filterByProjectConfig} from './plugin-core' + +const GLOBAL_MEMORY_FILE = 'GLOBAL.md' +const GLOBAL_CONFIG_DIR = '.trae' +const STEERING_SUBDIR = 'steering' +const RULES_SUBDIR = 'rules' +const COMMANDS_SUBDIR = 'commands' +const SKILLS_SUBDIR = 'skills' +const SKILL_FILE_NAME = 'SKILL.md' + +type TraeOutputSource + = | {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'steeringRule', readonly content: string} + | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'skillMain', readonly skill: SkillPrompt} + | {readonly kind: 'skillChildDoc', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'ignoreFile', readonly content: string} + +export class TraeIDEOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('TraeIDEOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: GLOBAL_MEMORY_FILE, + treatWorkspaceRootProjectAsProject: true, + indexignore: '.traeignore', + commands: { + subDir: COMMANDS_SUBDIR, + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + }, + skills: { + subDir: SKILLS_SUBDIR + }, + cleanup: { + delete: { + project: { + dirs: ['.trae/rules', '.trae/commands', '.trae/skills'] + }, + global: { + dirs: ['.trae/steering', '.trae/commands', '.trae/skills'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'global'], + singleScope: true + } + } + }) + } + + protected override getIgnoreOutputPath(): string | undefined { + if (this.indexignore == null) return void 0 + return path.join('.trae', '.ignore') + } + + private getGlobalSteeringDir(): string { + return this.joinPath(this.getGlobalConfigDir(), STEERING_SUBDIR) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {commands, skills, globalMemory, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const concreteProjects = this.getConcreteProjects(ctx) + const promptProjects = this.getProjectPromptOutputProjects(ctx) + const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) + const selectedCommands = commands != null + ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, command => this.resolveCommandSourceScope(command), this.getTopicScopeOverride(ctx, 'commands')) + : {items: [] as readonly CommandPrompt[]} + const selectedSkills = skills != null + ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) + : {items: [] as readonly SkillPrompt[]} + const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) + + if (globalMemory != null && activePromptScopes.has('global')) { + declarations.push({ + path: this.joinPath(this.getGlobalSteeringDir(), GLOBAL_MEMORY_FILE), + scope: 'global', + source: { + kind: 'globalMemory', + content: globalMemory.content as string + } satisfies TraeOutputSource + }) + } + + for (const project of promptProjects) { + const projectBase = this.resolveProjectRootDir(ctx, project) + if (projectBase == null) continue + + if (project.childMemoryPrompts != null && activePromptScopes.has('project')) { + for (const child of project.childMemoryPrompts) { + const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path + const normalizedChildPath = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '') + const globPattern = this.buildProjectRelativeGlobPattern(normalizedChildPath) + const steeringContent = this.buildMarkdownContent( + [ + this.buildPathGuardHint(normalizedChildPath), + '', + child.content as string + ].join('\n'), + {alwaysApply: false, globs: globPattern}, + ctx + ) + + declarations.push({ + path: path.join(projectBase, GLOBAL_CONFIG_DIR, RULES_SUBDIR, this.buildSteeringFileName(child)), + scope: 'project', + source: { + kind: 'steeringRule', + content: steeringContent + } satisfies TraeOutputSource + }) + } + } + } + + if (selectedCommands.selectedScope === 'project') { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectBase = this.resolveProjectConfigDir(ctx, project) + if (projectBase == null) continue + + const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') + for (const command of filteredCommands) { + declarations.push({ + path: path.join(projectBase, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), + scope: 'project', + source: {kind: 'command', command} satisfies TraeOutputSource + }) + } + } + } + + if (selectedCommands.selectedScope === 'global') { + const baseDir = this.getGlobalConfigDir() + const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') + for (const command of filteredCommands) { + declarations.push({ + path: path.join(baseDir, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), + scope: 'global', + source: {kind: 'command', command} satisfies TraeOutputSource + }) + } + } + + const pushSkillDeclarations = ( + baseDir: string, + scope: 'project' | 'global', + filteredSkills: readonly SkillPrompt[] + ): void => { + for (const skill of filteredSkills) { + const skillName = this.getSkillName(skill) + const skillDir = path.join(baseDir, SKILLS_SUBDIR, skillName) + declarations.push({ + path: path.join(skillDir, SKILL_FILE_NAME), + scope, + source: {kind: 'skillMain', skill} satisfies TraeOutputSource + }) + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), + scope, + source: { + kind: 'skillChildDoc', + content: childDoc.content as string + } satisfies TraeOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope, + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies TraeOutputSource + }) + } + } + } + } + + if (selectedSkills.selectedScope === 'project') { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectBase = this.resolveProjectConfigDir(ctx, project) + if (projectBase == null) continue + const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') + pushSkillDeclarations(projectBase, 'project', filteredSkills) + } + } + + if (selectedSkills.selectedScope === 'global') { + const baseDir = this.getGlobalConfigDir() + const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') + pushSkillDeclarations(baseDir, 'global', filteredSkills) + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of concreteProjects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: { + kind: 'ignoreFile', + content: ignoreFile.content + } satisfies TraeOutputSource + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + const source = declaration.source as TraeOutputSource + switch (source.kind) { + case 'globalMemory': + case 'steeringRule': + case 'skillChildDoc': + case 'ignoreFile': return source.content + case 'command': return this.buildCommandContent(source.command, ctx) + case 'skillMain': { + const frontMatterData = this.buildSkillFrontMatter(source.skill) + return this.buildMarkdownContent(source.skill.content as string, frontMatterData, ctx) + } + case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + protected override buildSkillFrontMatter(skill: SkillPrompt): Record { + const fm: Record = { + description: skill.yamlFrontMatter.description ?? '' + } + + if (skill.yamlFrontMatter.displayName != null) fm['name'] = skill.yamlFrontMatter.displayName + + return fm + } + + private buildSteeringFileName(child: ProjectChildrenMemoryPrompt): string { + const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path + const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '').replaceAll('/', '-') + return `trae-${normalized}.md` + } + + private buildPathGuardHint(normalizedChildPath: string): string { + if (normalizedChildPath.length === 0) { + return 'Scope guard: apply this rule to project source files only; do not apply to generated output directories (for example dist/, build/, out/, .next/, target/).' + } + + return [ + `Scope guard: this rule is for the project-root path "${normalizedChildPath}/" only.`, + `Do not apply this rule to generated output paths such as "dist/${normalizedChildPath}/", "build/${normalizedChildPath}/", "out/${normalizedChildPath}/", ".next/${normalizedChildPath}/", or "target/${normalizedChildPath}/".` + ].join('\n') + } + + private buildProjectRelativeGlobPattern(normalizedChildPath: string): string { + if (normalizedChildPath.length === 0) return '**/*' + return `${normalizedChildPath}/**` + } +} diff --git a/sdk/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts b/sdk/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts new file mode 100644 index 00000000..d65290d9 --- /dev/null +++ b/sdk/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts @@ -0,0 +1,65 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin, IDEKind} from './plugin-core' + +const VSCODE_DIR = '.vscode' + +export class VisualStudioCodeIDEConfigOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('VisualStudioCodeIDEConfigOutputPlugin', { + cleanup: { + delete: { + project: { + files: ['.vscode/settings.json', '.vscode/extensions.json'] + } + } + }, + capabilities: {} + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {projects} = ctx.collectedOutputContext.workspace + const {vscodeConfigFiles} = ctx.collectedOutputContext + const vscodeConfigs = vscodeConfigFiles ?? [] + + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + for (const config of vscodeConfigs) { + const targetRelativePath = this.getTargetRelativePath(config) + declarations.push({ + path: this.resolvePath(projectDir.basePath, projectDir.path, targetRelativePath), + scope: 'project', + source: {content: config.content} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } + + private getTargetRelativePath(config: {type: IDEKind, dir: {path: string}}): string { + const sourcePath = config.dir.path + + if (config.type !== IDEKind.VSCode) return this.basename(sourcePath) + + const vscodeIndex = sourcePath.indexOf(VSCODE_DIR) + if (vscodeIndex !== -1) return sourcePath.slice(Math.max(0, vscodeIndex)) + return this.joinPath(VSCODE_DIR, this.basename(sourcePath)) + } +} diff --git a/sdk/src/plugins/WarpIDEOutputPlugin.test.ts b/sdk/src/plugins/WarpIDEOutputPlugin.test.ts new file mode 100644 index 00000000..cfd2b31e --- /dev/null +++ b/sdk/src/plugins/WarpIDEOutputPlugin.test.ts @@ -0,0 +1,75 @@ +import type {GlobalMemoryPrompt, OutputWriteContext, ProjectRootMemoryPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' +import {WarpIDEOutputPlugin} from './WarpIDEOutputPlugin' + +function createGlobalMemoryPrompt(): GlobalMemoryPrompt { + return { + type: PromptKind.GlobalMemory, + content: 'global prompt', + length: 13, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'aindex/dist/global.mdx', + basePath: path.resolve('.'), + getDirectoryName: () => 'dist', + getAbsolutePath: () => path.resolve('aindex/dist/global.mdx') + }, + markdownContents: [] + } as GlobalMemoryPrompt +} + +function createWorkspaceRootPrompt(): ProjectRootMemoryPrompt { + return { + type: PromptKind.ProjectRootMemory, + content: 'workspace prompt', + length: 16, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + }, + markdownContents: [] + } as ProjectRootMemoryPrompt +} + +describe('warpIDEOutputPlugin workspace prompt support', () => { + it('writes the synthetic workspace root prompt to workspaceDir/WARP.md', async () => { + const workspaceBase = path.resolve('tmp/warp-workspace') + const plugin = new WarpIDEOutputPlugin() + const ctx = { + logger: createLogger('WarpIDEOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [{ + name: '__workspace__', + isWorkspaceRootProject: true, + rootMemoryPrompt: createWorkspaceRootPrompt() + }] + }, + globalMemory: createGlobalMemoryPrompt() + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const workspaceDeclaration = declarations.find(declaration => declaration.path === path.join(workspaceBase, 'WARP.md')) + + expect(workspaceDeclaration?.path).toBe(path.join(workspaceBase, 'WARP.md')) + expect(workspaceDeclaration?.scope).toBe('project') + expect((workspaceDeclaration?.source as {content?: string} | undefined)?.content).toContain('global prompt') + expect((workspaceDeclaration?.source as {content?: string} | undefined)?.content).toContain('workspace prompt') + }) +}) diff --git a/sdk/src/plugins/WarpIDEOutputPlugin.ts b/sdk/src/plugins/WarpIDEOutputPlugin.ts new file mode 100644 index 00000000..6168955b --- /dev/null +++ b/sdk/src/plugins/WarpIDEOutputPlugin.ts @@ -0,0 +1,110 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import * as path from 'node:path' +import {AbstractOutputPlugin, PLUGIN_NAMES} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'WARP.md' + +export class WarpIDEOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('WarpIDEOutputPlugin', { + outputFileName: PROJECT_MEMORY_FILE, + treatWorkspaceRootProjectAsProject: true, + indexignore: '.warpindexignore', + cleanup: { + delete: { + project: { + files: [PROJECT_MEMORY_FILE] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {globalMemory, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const projects = this.getConcreteProjects(ctx) + const promptProjects = this.getProjectPromptOutputProjects(ctx) + const agentsRegistered = this.shouldSkipDueToPlugin(ctx, PLUGIN_NAMES.AgentsOutput) + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) + const globalMemoryContent = this.extractGlobalMemoryContent(ctx) + + if (agentsRegistered) { + if (globalMemory != null && activePromptScopes.has('global')) { + for (const project of promptProjects) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + declarations.push({ + path: path.join(projectRootDir, PROJECT_MEMORY_FILE), + scope: 'project', + source: {content: globalMemory.content as string} + }) + } + } + } else { + for (const project of promptProjects) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + + if (project.rootMemoryPrompt != null && activePromptScopes.has('project')) { + const combinedContent = this.combineGlobalWithContent( + globalMemoryContent, + project.rootMemoryPrompt.content as string + ) + declarations.push({ + path: path.join(projectRootDir, PROJECT_MEMORY_FILE), + scope: 'project', + source: {content: combinedContent} + }) + } + + if (project.childMemoryPrompts != null && activePromptScopes.has('project')) { + for (const child of project.childMemoryPrompts) { + declarations.push({ + path: this.resolveFullPath(child.dir), + scope: 'project', + source: {content: child.content as string} + }) + } + } + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: this.resolvePath(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: {content: ignoreFile.content} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } +} diff --git a/sdk/src/plugins/WindsurfOutputPlugin.test.ts b/sdk/src/plugins/WindsurfOutputPlugin.test.ts new file mode 100644 index 00000000..dbe7f76d --- /dev/null +++ b/sdk/src/plugins/WindsurfOutputPlugin.test.ts @@ -0,0 +1,212 @@ +import type {CommandPrompt, OutputScopeSelection, OutputWriteContext, Project, RulePrompt, SkillPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' +import {WindsurfOutputPlugin} from './WindsurfOutputPlugin' + +function createCommandPrompt(scope: 'project' | 'global', seriName: string): CommandPrompt { + return { + type: PromptKind.Command, + content: 'command content', + length: 15, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'dev/build.mdx', + basePath: path.resolve('tmp/dist/commands'), + getDirectoryName: () => 'dev', + getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') + }, + commandPrefix: 'dev', + commandName: 'build', + seriName, + yamlFrontMatter: { + namingCase: 'kebabCase', + description: 'Build command', + scope + }, + markdownContents: [] + } as CommandPrompt +} + +function createSkillPrompt(scope: 'project' | 'global', seriName: string): SkillPrompt { + return { + type: PromptKind.Skill, + content: 'skill content', + length: 13, + filePathKind: FilePathKind.Relative, + skillName: 'ship-it', + dir: { + pathKind: FilePathKind.Relative, + path: 'skills/ship-it', + basePath: path.resolve('tmp/dist/skills'), + getDirectoryName: () => 'ship-it', + getAbsolutePath: () => path.resolve('tmp/dist/skills/ship-it') + }, + seriName, + yamlFrontMatter: { + namingCase: 'kebabCase', + description: 'Ship release', + scope + }, + markdownContents: [] + } as SkillPrompt +} + +function createRulePrompt(scope: 'project' | 'global'): RulePrompt { + return { + type: PromptKind.Rule, + content: 'rule body', + length: 9, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'rules/ops/guard.mdx', + basePath: path.resolve('tmp/dist/rules'), + getDirectoryName: () => 'ops', + getAbsolutePath: () => path.resolve('tmp/dist/rules/ops/guard.mdx') + }, + prefix: 'ops', + ruleName: 'guard', + globs: ['src/**'], + scope, + markdownContents: [] + } as RulePrompt +} + +function createProject(workspaceBase: string, name: string, includeSeries: readonly string[], promptSource = false): Project { + return { + name, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: name, + basePath: workspaceBase, + getDirectoryName: () => name, + getAbsolutePath: () => path.join(workspaceBase, name) + }, + isPromptSourceProject: promptSource, + projectConfig: { + includeSeries: [...includeSeries] + } + } as Project +} + +function createWorkspaceRootProject(): Project { + return { + name: '__workspace__', + isWorkspaceRootProject: true + } as Project +} + +function createWriteContext( + workspaceBase: string, + projects: readonly Project[], + commands: readonly CommandPrompt[], + skills: readonly SkillPrompt[], + scopeOverrides: { + readonly commands: OutputScopeSelection + readonly skills: OutputScopeSelection + } +): OutputWriteContext { + return { + logger: createLogger('WindsurfOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + pluginOptions: { + outputScopes: { + plugins: { + WindsurfOutputPlugin: scopeOverrides + } + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [...projects] + }, + commands, + skills + } + } as OutputWriteContext +} + +describe('windsurfOutputPlugin synthetic workspace project output', () => { + it('writes workflows and skills to each real project when project scope is selected', async () => { + const workspaceBase = path.resolve('tmp/windsurf-project-scope') + const plugin = new WindsurfOutputPlugin() + const context = createWriteContext( + workspaceBase, + [ + createProject(workspaceBase, 'alpha-project', ['alpha'], true), + createProject(workspaceBase, 'beta-project', ['beta']) + ], + [createCommandPrompt('project', 'alpha')], + [createSkillPrompt('project', 'alpha')], + {commands: 'project', skills: 'project'} + ) + + const declarations = await plugin.declareOutputFiles(context) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(workspaceBase, 'alpha-project', '.windsurf', 'workflows', 'dev-build.md')) + expect(paths).toContain(path.join(workspaceBase, 'alpha-project', '.windsurf', 'skills', 'ship-it', 'SKILL.md')) + expect(paths.some(outputPath => outputPath.includes(path.join('beta-project', '.windsurf')))).toBe(false) + expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) + }) + + it('writes project-scoped workflows and skills into workspace root via the synthetic workspace project', async () => { + const workspaceBase = path.resolve('tmp/windsurf-workspace-project') + const plugin = new WindsurfOutputPlugin() + const context = createWriteContext( + workspaceBase, + [createWorkspaceRootProject()], + [createCommandPrompt('project', 'alpha')], + [createSkillPrompt('project', 'alpha')], + {commands: 'project', skills: 'project'} + ) + + const declarations = await plugin.declareOutputFiles(context) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(workspaceBase, '.windsurf', 'workflows', 'dev-build.md')) + expect(paths).toContain(path.join(workspaceBase, '.windsurf', 'skills', 'ship-it', 'SKILL.md')) + expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) + }) + + it('writes project-scoped rules into workspace-root .windsurf/rules via the synthetic workspace project', async () => { + const workspaceBase = path.resolve('tmp/windsurf-workspace-rules') + const plugin = new WindsurfOutputPlugin() + const context = { + logger: createLogger('WindsurfOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [createWorkspaceRootProject()] + }, + rules: [createRulePrompt('project')] + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(context) + + expect(declarations.map(declaration => declaration.path)).toContain( + path.join(workspaceBase, '.windsurf', 'rules', 'rule-ops-guard.md') + ) + expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) + }) +}) diff --git a/sdk/src/plugins/WindsurfOutputPlugin.ts b/sdk/src/plugins/WindsurfOutputPlugin.ts new file mode 100644 index 00000000..d18a0795 --- /dev/null +++ b/sdk/src/plugins/WindsurfOutputPlugin.ts @@ -0,0 +1,278 @@ +import type {CommandPrompt, OutputFileDeclaration, OutputWriteContext, RulePrompt, SkillPrompt} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterByProjectConfig, PLUGIN_NAMES} from './plugin-core' + +const CODEIUM_WINDSURF_DIR = '.codeium/windsurf' +const WORKFLOWS_SUBDIR = 'global_workflows' +const PROJECT_WORKFLOWS_SUBDIR = 'workflows' +const MEMORIES_SUBDIR = 'memories' +const GLOBAL_MEMORY_FILE = 'global_rules.md' +const SKILLS_SUBDIR = 'skills' +const SKILL_FILE_NAME = 'SKILL.md' +const WINDSURF_RULES_DIR = '.windsurf' +const WINDSURF_RULES_SUBDIR = 'rules' +const RULE_FILE_PREFIX = 'rule-' + +type WindsurfOutputSource + = | {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'skillMain', readonly skill: SkillPrompt} + | {readonly kind: 'skillChildDoc', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'rule', readonly rule: RulePrompt} + | {readonly kind: 'ignoreFile', readonly content: string} + +export class WindsurfOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('WindsurfOutputPlugin', { + globalConfigDir: CODEIUM_WINDSURF_DIR, + outputFileName: '', + treatWorkspaceRootProjectAsProject: true, + dependsOn: [PLUGIN_NAMES.AgentsOutput], + indexignore: '.codeiumignore', + commands: { + subDir: WORKFLOWS_SUBDIR, + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + }, + skills: { + subDir: SKILLS_SUBDIR + }, + rules: { + sourceScopes: ['project', 'global'] + }, + cleanup: { + delete: { + project: { + dirs: ['.windsurf/rules', '.windsurf/workflows', '.windsurf/global_workflows', '.windsurf/skills', '.codeium/windsurf/global_workflows', '.codeium/windsurf/skills'] + }, + global: { + dirs: ['.codeium/windsurf/global_workflows', '.codeium/windsurf/memories', '.codeium/windsurf/skills'] + } + } + }, + capabilities: { + prompt: { + scopes: ['global'], + singleScope: false + }, + rules: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {commands, skills, globalMemory, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) + const globalBase = this.getCodeiumWindsurfDir() + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['global'])) + const selectedCommands = commands != null + ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, command => this.resolveCommandSourceScope(command), this.getTopicScopeOverride(ctx, 'commands')) + : {items: [] as readonly CommandPrompt[]} + const selectedSkills = skills != null + ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) + : {items: [] as readonly SkillPrompt[]} + const concreteProjects = this.getConcreteProjects(ctx) + + if (globalMemory != null && activePromptScopes.has('global')) { + declarations.push({ + path: path.join(globalBase, MEMORIES_SUBDIR, GLOBAL_MEMORY_FILE), + scope: 'global', + source: { + kind: 'globalMemory', + content: globalMemory.content as string + } satisfies WindsurfOutputSource + }) + } + + const pushSkillDeclarations = ( + basePath: string, + scope: 'project' | 'global', + skill: SkillPrompt + ): void => { + const skillName = this.getSkillName(skill) + const skillDir = path.join(basePath, SKILLS_SUBDIR, skillName) + declarations.push({ + path: path.join(skillDir, SKILL_FILE_NAME), + scope, + source: {kind: 'skillMain', skill} satisfies WindsurfOutputSource + }) + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), + scope, + source: { + kind: 'skillChildDoc', + content: childDoc.content as string + } satisfies WindsurfOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope, + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies WindsurfOutputSource + }) + } + } + } + + if (selectedSkills.selectedScope === 'project') { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + const projectBase = projectRootDir == null ? void 0 : path.join(projectRootDir, WINDSURF_RULES_DIR) + if (projectBase == null) continue + const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') + for (const skill of filteredSkills) pushSkillDeclarations(projectBase, 'project', skill) + } + } + + if (selectedSkills.selectedScope === 'global') { + const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') + for (const skill of filteredSkills) pushSkillDeclarations(globalBase, 'global', skill) + } + + const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) + if (selectedCommands.selectedScope === 'project') { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + const projectBase = projectRootDir == null ? void 0 : path.join(projectRootDir, WINDSURF_RULES_DIR) + if (projectBase == null) continue + const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') + for (const command of filteredCommands) { + declarations.push({ + path: path.join(projectBase, PROJECT_WORKFLOWS_SUBDIR, this.transformCommandName(command, transformOptions)), + scope: 'project', + source: {kind: 'command', command} satisfies WindsurfOutputSource + }) + } + } + } + + if (selectedCommands.selectedScope === 'global') { + const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') + for (const command of filteredCommands) { + declarations.push({ + path: path.join(globalBase, WORKFLOWS_SUBDIR, this.transformCommandName(command, transformOptions)), + scope: 'global', + source: {kind: 'command', command} satisfies WindsurfOutputSource + }) + } + } + + if (rules != null && rules.length > 0) { + const activeRuleScopes = new Set(this.selectRuleScopes(ctx, rules)) + const globalRules = rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global') + if (activeRuleScopes.has('global')) { + for (const rule of globalRules) { + declarations.push({ + path: path.join(globalBase, MEMORIES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'global', + source: {kind: 'rule', rule} satisfies WindsurfOutputSource + }) + } + } + + if (activeRuleScopes.has('project')) { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + + const projectRules = applySubSeriesGlobPrefix( + filterByProjectConfig(rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'project'), project.projectConfig, 'rules'), + project.projectConfig + ) + for (const rule of projectRules) { + declarations.push({ + path: path.join(projectRootDir, WINDSURF_RULES_DIR, WINDSURF_RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'project', + source: {kind: 'rule', rule} satisfies WindsurfOutputSource + }) + } + } + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of concreteProjects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: { + kind: 'ignoreFile', + content: ignoreFile.content + } satisfies WindsurfOutputSource + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + const source = declaration.source as WindsurfOutputSource + switch (source.kind) { + case 'globalMemory': + case 'skillChildDoc': + case 'ignoreFile': return source.content + case 'command': return this.buildCommandContent(source.command, ctx) + case 'skillMain': { + const frontMatterData = this.buildSkillFrontMatter(source.skill) + return this.buildMarkdownContent(source.skill.content as string, frontMatterData, ctx) + } + case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + case 'rule': return this.buildRuleContent(source.rule, ctx) + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + private getCodeiumWindsurfDir(): string { return path.join(this.getHomeDir(), CODEIUM_WINDSURF_DIR) } + + protected override buildRuleFileName(rule: RulePrompt, prefix: string = RULE_FILE_PREFIX): string { + return `${prefix}${rule.prefix}-${rule.ruleName}.md` + } + + protected override buildRuleContent(rule: RulePrompt, ctx?: OutputWriteContext): string { + const fmData: Record = {trigger: 'glob', globs: rule.globs.length > 0 ? rule.globs.join(', ') : ''} + const raw = this.buildMarkdownContent(rule.content, fmData, ctx) + const lines = raw.split('\n') + return lines.map(line => { + const match = /^(\s*globs:\s*)(['"])(.*)\2\s*$/.exec(line) + if (match == null) return line + const prefix = match[1] ?? 'globs: ' + const value = match[3] ?? '' + if (value.trim().length === 0) return line + return `${prefix}${value}` + }).join('\n') + } +} diff --git a/sdk/src/plugins/WslMirrorDeclarations.test.ts b/sdk/src/plugins/WslMirrorDeclarations.test.ts new file mode 100644 index 00000000..69f48e58 --- /dev/null +++ b/sdk/src/plugins/WslMirrorDeclarations.test.ts @@ -0,0 +1,25 @@ +import {describe, expect, it} from 'vitest' +import {ClaudeCodeCLIOutputPlugin} from './ClaudeCodeCLIOutputPlugin' +import {CodexCLIOutputPlugin} from './CodexCLIOutputPlugin' + +describe('wSL mirror declarations', () => { + it('declares the expected Claude host config files', async () => { + const plugin = new ClaudeCodeCLIOutputPlugin() + const declarations = await plugin.declareWslMirrorFiles?.({} as never) + + expect(declarations).toEqual([ + {sourcePath: '~/.claude/settings.json'}, + {sourcePath: '~/.claude/config.json'} + ]) + }) + + it('declares the expected Codex host config files', async () => { + const plugin = new CodexCLIOutputPlugin() + const declarations = await plugin.declareWslMirrorFiles?.({} as never) + + expect(declarations).toEqual([ + {sourcePath: '~/.codex/config.toml'}, + {sourcePath: '~/.codex/auth.json'} + ]) + }) +}) diff --git a/sdk/src/plugins/ZedIDEConfigOutputPlugin.ts b/sdk/src/plugins/ZedIDEConfigOutputPlugin.ts new file mode 100644 index 00000000..2da8d739 --- /dev/null +++ b/sdk/src/plugins/ZedIDEConfigOutputPlugin.ts @@ -0,0 +1,64 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin, IDEKind} from './plugin-core' + +const ZED_DIR = '.zed' + +export class ZedIDEConfigOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('ZedIDEConfigOutputPlugin', { + cleanup: { + delete: { + project: { + files: ['.zed/settings.json'] + } + } + }, + capabilities: {} + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {projects} = ctx.collectedOutputContext.workspace + const zedConfigs = ctx.collectedOutputContext.zedConfigFiles ?? [] + + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + for (const config of zedConfigs) { + const targetRelativePath = this.getTargetRelativePath(config) + declarations.push({ + path: this.resolvePath(projectDir.basePath, projectDir.path, targetRelativePath), + scope: 'project', + source: {content: config.content} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } + + private getTargetRelativePath(config: {type: IDEKind, dir: {path: string}}): string { + const sourcePath = config.dir.path + + if (config.type !== IDEKind.Zed) return this.basename(sourcePath) + + const zedIndex = sourcePath.indexOf(ZED_DIR) + if (zedIndex !== -1) return sourcePath.slice(Math.max(0, zedIndex)) + return this.joinPath(ZED_DIR, 'settings.json') + } +} diff --git a/sdk/src/plugins/desk-paths.test.ts b/sdk/src/plugins/desk-paths.test.ts new file mode 100644 index 00000000..a266f9e5 --- /dev/null +++ b/sdk/src/plugins/desk-paths.test.ts @@ -0,0 +1,141 @@ +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {afterEach, describe, expect, it, vi} from 'vitest' + +import {deleteEmptyDirectories, deleteFiles, deleteTargets, getPlatformFixedDir} from '../core/desk-paths' + +const defaultNativeBinding = globalThis.__TNMSC_TEST_NATIVE_BINDING__ + +describe('desk paths', () => { + afterEach(() => { + vi.restoreAllMocks() + vi.clearAllMocks() + globalThis.__TNMSC_TEST_NATIVE_BINDING__ = defaultNativeBinding + }) + + it('delegates getPlatformFixedDir to the native binding', () => { + const getPlatformFixedDirMock = vi.fn(() => '/tmp/native-fixed-dir') + globalThis.__TNMSC_TEST_NATIVE_BINDING__ = { + ...defaultNativeBinding, + getPlatformFixedDir: getPlatformFixedDirMock + } + + expect(getPlatformFixedDir()).toBe('/tmp/native-fixed-dir') + expect(getPlatformFixedDirMock).toHaveBeenCalledOnce() + }) + + it('throws when the native desk-paths binding is unavailable', () => { + globalThis.__TNMSC_TEST_NATIVE_BINDING__ = void 0 + + expect(() => getPlatformFixedDir()).toThrow('Native desk-paths binding is required') + }) + + it('deletes mixed file and directory targets in one batch', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-desk-paths-delete-targets-')) + const outputFile = path.join(tempDir, 'output.txt') + const outputDir = path.join(tempDir, 'nested') + const nestedFile = path.join(outputDir, 'artifact.txt') + + try { + fs.mkdirSync(outputDir, {recursive: true}) + fs.writeFileSync(outputFile, 'file', 'utf8') + fs.writeFileSync(nestedFile, 'nested', 'utf8') + + const result = await deleteTargets({ + files: [outputFile], + dirs: [outputDir] + }) + + expect(result.deletedFiles).toEqual([outputFile]) + expect(result.deletedDirs).toEqual([outputDir]) + expect(result.fileErrors).toEqual([]) + expect(result.dirErrors).toEqual([]) + expect(fs.existsSync(outputFile)).toBe(false) + expect(fs.existsSync(outputDir)).toBe(false) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('caps delete file concurrency to the configured worker limit', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-desk-paths-concurrency-')) + const files = Array.from({length: 40}, (_, index) => path.join(tempDir, `artifact-${index}.txt`)) + let active = 0 + let maxActive = 0 + const originalLstat = fs.promises.lstat.bind(fs.promises) + + try { + fs.mkdirSync(tempDir, {recursive: true}) + for (const filePath of files) fs.writeFileSync(filePath, 'artifact', 'utf8') + + vi.spyOn(fs.promises, 'lstat').mockImplementation(async filePath => { + active += 1 + maxActive = Math.max(maxActive, active) + await new Promise(resolve => setTimeout(resolve, 20)) + + try { + return await originalLstat(filePath) + } + finally { + active -= 1 + } + }) + + const result = await deleteFiles(files) + + expect(result.deleted).toBe(files.length) + expect(result.errors).toEqual([]) + expect(maxActive).toBeLessThanOrEqual(32) + expect(maxActive).toBeGreaterThan(1) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('deletes only empty directories from deepest to shallowest', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-desk-paths-empty-dirs-')) + const parentDir = path.join(tempDir, 'empty-parent') + const childDir = path.join(parentDir, 'leaf') + const nonEmptyDir = path.join(tempDir, 'non-empty') + + try { + fs.mkdirSync(childDir, {recursive: true}) + fs.mkdirSync(nonEmptyDir, {recursive: true}) + fs.writeFileSync(path.join(nonEmptyDir, 'keep.txt'), 'keep', 'utf8') + + const result = await deleteEmptyDirectories([parentDir, childDir, nonEmptyDir]) + + expect(result.deleted).toBe(2) + expect(result.deletedPaths).toEqual([childDir, parentDir]) + expect(result.errors).toEqual([]) + expect(fs.existsSync(parentDir)).toBe(false) + expect(fs.existsSync(nonEmptyDir)).toBe(true) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('skips directories that become non-empty before empty-directory deletion runs', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-desk-paths-empty-race-')) + const targetDir = path.join(tempDir, 'maybe-empty') + + try { + fs.mkdirSync(targetDir, {recursive: true}) + fs.writeFileSync(path.join(targetDir, 'new-file.txt'), 'late write', 'utf8') + + const result = await deleteEmptyDirectories([targetDir, path.join(tempDir, 'missing')]) + + expect(result.deleted).toBe(0) + expect(result.deletedPaths).toEqual([]) + expect(result.errors).toEqual([]) + expect(fs.existsSync(targetDir)).toBe(true) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/plugins/desk-paths.ts b/sdk/src/plugins/desk-paths.ts new file mode 100644 index 00000000..add7c1dd --- /dev/null +++ b/sdk/src/plugins/desk-paths.ts @@ -0,0 +1 @@ +export * from '../core/desk-paths' diff --git a/sdk/src/plugins/ide-config-output.test.ts b/sdk/src/plugins/ide-config-output.test.ts new file mode 100644 index 00000000..1fc47b65 --- /dev/null +++ b/sdk/src/plugins/ide-config-output.test.ts @@ -0,0 +1,238 @@ +import type { + OutputWriteContext, + Project, + ProjectIDEConfigFile +} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {EditorConfigOutputPlugin} from './EditorConfigOutputPlugin' +import {JetBrainsIDECodeStyleConfigOutputPlugin} from './JetBrainsIDECodeStyleConfigOutputPlugin' +import {createLogger, FilePathKind, IDEKind} from './plugin-core' +import {VisualStudioCodeIDEConfigOutputPlugin} from './VisualStudioCodeIDEConfigOutputPlugin' +import {ZedIDEConfigOutputPlugin} from './ZedIDEConfigOutputPlugin' + +function createProject( + workspaceBase: string, + name: string, + promptSource = false +): Project { + return { + name, + isPromptSourceProject: promptSource, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: name, + basePath: workspaceBase, + getDirectoryName: () => name, + getAbsolutePath: () => path.join(workspaceBase, name) + } + } as Project +} + +function createConfigFile( + type: IDEKind, + sourcePath: string, + content: string +): ProjectIDEConfigFile { + return { + type, + content, + length: content.length, + filePathKind: FilePathKind.Absolute, + dir: { + pathKind: FilePathKind.Absolute, + path: sourcePath, + getDirectoryName: () => path.basename(sourcePath) + } + } as ProjectIDEConfigFile +} + +function createWriteContext(workspaceBase: string): OutputWriteContext { + return { + logger: createLogger('IdeConfigOutputPluginTest', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + createProject(workspaceBase, 'aindex', true), + createProject(workspaceBase, 'memory-sync') + ] + }, + editorConfigFiles: [ + createConfigFile( + IDEKind.EditorConfig, + path.join(workspaceBase, 'aindex', 'public', '.editorconfig'), + 'root = true\n' + ) + ], + vscodeConfigFiles: [ + createConfigFile( + IDEKind.VSCode, + path.join( + workspaceBase, + 'aindex', + 'public', + '.vscode', + 'settings.json' + ), + '{}\n' + ), + createConfigFile( + IDEKind.VSCode, + path.join( + workspaceBase, + 'aindex', + 'public', + '.vscode', + 'extensions.json' + ), + '{}\n' + ) + ], + zedConfigFiles: [ + createConfigFile( + IDEKind.Zed, + path.join(workspaceBase, 'aindex', 'public', '.zed', 'settings.json'), + '{"tab_size": 2}\n' + ) + ], + jetbrainsConfigFiles: [ + createConfigFile( + IDEKind.IntellijIDEA, + path.join(workspaceBase, 'aindex', 'public', '.idea', '.gitignore'), + '/workspace.xml\n' + ), + createConfigFile( + IDEKind.IntellijIDEA, + path.join( + workspaceBase, + 'aindex', + 'public', + '.idea', + 'codeStyles', + 'Project.xml' + ), + '\n' + ), + createConfigFile( + IDEKind.IntellijIDEA, + path.join( + workspaceBase, + 'aindex', + 'public', + '.idea', + 'codeStyles', + 'codeStyleConfig.xml' + ), + '\n' + ) + ] + } + } as OutputWriteContext +} + +describe('ide config output plugins', () => { + it('includes the prompt source project for editorconfig output', async () => { + const workspaceBase = path.resolve('tmp/ide-output-editorconfig') + const plugin = new EditorConfigOutputPlugin() + const declarations = await plugin.declareOutputFiles( + createWriteContext(workspaceBase) + ) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toEqual([ + path.join(workspaceBase, 'aindex', '.editorconfig'), + path.join(workspaceBase, 'memory-sync', '.editorconfig') + ]) + }) + + it('includes the prompt source project for vscode output', async () => { + const workspaceBase = path.resolve('tmp/ide-output-vscode') + const plugin = new VisualStudioCodeIDEConfigOutputPlugin() + const declarations = await plugin.declareOutputFiles( + createWriteContext(workspaceBase) + ) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toEqual([ + path.join(workspaceBase, 'aindex', '.vscode', 'settings.json'), + path.join(workspaceBase, 'aindex', '.vscode', 'extensions.json'), + path.join(workspaceBase, 'memory-sync', '.vscode', 'settings.json'), + path.join(workspaceBase, 'memory-sync', '.vscode', 'extensions.json') + ]) + }) + + it('includes the prompt source project for zed output and cleanup', async () => { + const workspaceBase = path.resolve('tmp/ide-output-zed') + const plugin = new ZedIDEConfigOutputPlugin() + const ctx = createWriteContext(workspaceBase) + const declarations = await plugin.declareOutputFiles(ctx) + const cleanup = await plugin.declareCleanupPaths(ctx) + + expect(declarations.map(declaration => declaration.path)).toEqual([ + path.join(workspaceBase, 'aindex', '.zed', 'settings.json'), + path.join(workspaceBase, 'memory-sync', '.zed', 'settings.json') + ]) + expect(cleanup.delete).toEqual([ + { + kind: 'file', + label: 'delete.project', + path: path.join(workspaceBase, 'aindex', '.zed', 'settings.json'), + scope: 'project' + }, + { + kind: 'file', + label: 'delete.project', + path: path.join(workspaceBase, 'memory-sync', '.zed', 'settings.json'), + scope: 'project' + } + ]) + }) + + it('includes the prompt source project for jetbrains output', async () => { + const workspaceBase = path.resolve('tmp/ide-output-jetbrains') + const plugin = new JetBrainsIDECodeStyleConfigOutputPlugin() + const declarations = await plugin.declareOutputFiles( + createWriteContext(workspaceBase) + ) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toEqual([ + path.join(workspaceBase, 'aindex', '.idea', '.gitignore'), + path.join(workspaceBase, 'aindex', '.idea', 'codeStyles', 'Project.xml'), + path.join( + workspaceBase, + 'aindex', + '.idea', + 'codeStyles', + 'codeStyleConfig.xml' + ), + path.join(workspaceBase, 'aindex', '.editorconfig'), + path.join(workspaceBase, 'memory-sync', '.idea', '.gitignore'), + path.join( + workspaceBase, + 'memory-sync', + '.idea', + 'codeStyles', + 'Project.xml' + ), + path.join( + workspaceBase, + 'memory-sync', + '.idea', + 'codeStyles', + 'codeStyleConfig.xml' + ), + path.join(workspaceBase, 'memory-sync', '.editorconfig') + ]) + }) +}) diff --git a/sdk/src/plugins/plugin-agentskills-compact.ts b/sdk/src/plugins/plugin-agentskills-compact.ts new file mode 100644 index 00000000..abe6e9b6 --- /dev/null +++ b/sdk/src/plugins/plugin-agentskills-compact.ts @@ -0,0 +1,3 @@ +export { + GenericSkillsOutputPlugin +} from './GenericSkillsOutputPlugin' diff --git a/sdk/src/plugins/plugin-agentsmd.ts b/sdk/src/plugins/plugin-agentsmd.ts new file mode 100644 index 00000000..2a8505e4 --- /dev/null +++ b/sdk/src/plugins/plugin-agentsmd.ts @@ -0,0 +1,3 @@ +export { + AgentsOutputPlugin +} from './AgentsOutputPlugin' diff --git a/sdk/src/plugins/plugin-claude-code-cli.ts b/sdk/src/plugins/plugin-claude-code-cli.ts new file mode 100644 index 00000000..e65d3791 --- /dev/null +++ b/sdk/src/plugins/plugin-claude-code-cli.ts @@ -0,0 +1,3 @@ +export { + ClaudeCodeCLIOutputPlugin +} from './ClaudeCodeCLIOutputPlugin' diff --git a/sdk/src/plugins/plugin-core.ts b/sdk/src/plugins/plugin-core.ts new file mode 100644 index 00000000..20b28833 --- /dev/null +++ b/sdk/src/plugins/plugin-core.ts @@ -0,0 +1,172 @@ +import type { + ILogger, + LoggerDiagnosticRecord, + LogLevel +} from '@truenine/logger' +import { + clearBufferedDiagnostics as clearBufferedDiagnosticsNative, + createLogger as createLoggerNative, + drainBufferedDiagnostics as drainBufferedDiagnosticsNative, + getGlobalLogLevel as getGlobalLogLevelNative, + setGlobalLogLevel as setGlobalLogLevelNative +} from '@truenine/logger' + +export { + AbstractInputCapability +} from '../inputs/AbstractInputCapability' + +export { + AbstractOutputPlugin +} from './plugin-core/AbstractOutputPlugin' + +export type { + AbstractOutputPluginOptions, + CleanupScopePathsConfig, + CombineOptions, + CommandNameTransformOptions, + CommandOutputConfig, + OutputCleanupConfig, + RuleContentOptions, + RuleOutputConfig, + SkillFrontMatterOptions, + SkillsOutputConfig, + SubAgentArtifactFormat, + SubAgentFileNameSource, + SubAgentNameTransformOptions, + SubAgentsOutputConfig +} from './plugin-core/AbstractOutputPlugin' + +export { + AbstractPlugin +} from './plugin-core/AbstractPlugin' + +export { + DEFAULT_USER_CONFIG, + FileExtensions, + FrontMatterFields, + GlobalConfigDirs, + hasSourcePromptExtension, + IgnoreFiles, + OutputFileNames, + OutputPrefixes, + OutputSubdirectories, + PathPlaceholders, + PLUGIN_NAMES, + PreservedSkills, + SourceLocaleExtensions, + SourcePromptExtensions, + SourcePromptFileExtensions, + ToolPresets, + WORKSPACE_ROOT_PROJECT_NAME +} from './plugin-core/constants' + +export type { + PluginName +} from './plugin-core/constants' + +export { + validateCommandMetadata, + validateRuleMetadata, + validateSkillMetadata, + validateSubAgentMetadata +} from './plugin-core/ExportMetadataTypes' + +export { + applySubSeriesGlobPrefix, + filterByProjectConfig, + findAllGitRepos, + findGitModuleInfoDirs, + resolveGitInfoDir +} from './plugin-core/filters' + +export type { + FilterConfigPath, + SeriesFilterable +} from './plugin-core/filters' + +export { + GlobalScopeCollector +} from './plugin-core/GlobalScopeCollector' + +export type { + GlobalScopeCollectorOptions, + ScopeRegistration +} from './plugin-core/GlobalScopeCollector' + +export { + ScopePriority, + ScopeRegistry +} from './plugin-core/GlobalScopeCollector' + +export { + createLocalizedPromptReader, + LocalizedPromptReader +} from './plugin-core/LocalizedPromptReader' + +export { + collectMcpServersFromSkills, + McpConfigManager, + transformMcpConfigForCursor, + transformMcpConfigForOpencode, + transformMcpServerMap +} from './plugin-core/McpConfigManager' + +export type { + McpConfigFormat, + McpConfigTransformer, + McpServerEntry, + McpWriteResult, + TransformedMcpConfig +} from './plugin-core/McpConfigManager' + +export { + clearPromptArtifactCache, + compileRawPromptArtifact, + readPromptArtifact +} from './plugin-core/PromptArtifactCache' + +export { + deriveSubAgentIdentity, + flattenPromptPath, + resolveSkillName, + resolveSubAgentCanonicalName +} from './plugin-core/PromptIdentity' + +export { + RegistryWriter +} from './plugin-core/RegistryWriter' + +export { + DEFAULT_SCOPE_PRIORITY, + resolveTopicScopes +} from './plugin-core/scopePolicy' + +export * from './plugin-core/types' + +export type { + DiagnosticLines, + ILogger, + LoggerDiagnosticInput, + LoggerDiagnosticRecord, + LogLevel +} from '@truenine/logger' + +export function clearBufferedDiagnostics(): void { + clearBufferedDiagnosticsNative() +} + +export function createLogger(namespace: string, logLevel?: LogLevel): ILogger { + return createLoggerNative(namespace, logLevel) +} + +export function drainBufferedDiagnostics(): LoggerDiagnosticRecord[] { + return drainBufferedDiagnosticsNative() +} + +export function getGlobalLogLevel(): LogLevel | undefined { + return getGlobalLogLevelNative() +} + +export function setGlobalLogLevel(level: LogLevel): void { + setGlobalLogLevelNative(level) +} diff --git a/sdk/src/plugins/plugin-core/AbstractOutputPlugin.frontmatter.test.ts b/sdk/src/plugins/plugin-core/AbstractOutputPlugin.frontmatter.test.ts new file mode 100644 index 00000000..e0393c18 --- /dev/null +++ b/sdk/src/plugins/plugin-core/AbstractOutputPlugin.frontmatter.test.ts @@ -0,0 +1,204 @@ +import type {CommandPrompt, OutputWriteContext, RulePrompt, SkillPrompt, SubAgentPrompt} from './types' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {createLogger, FilePathKind, PromptKind} from '../plugin-core' +import {AbstractOutputPlugin} from './AbstractOutputPlugin' + +class TestFrontMatterOutputPlugin extends AbstractOutputPlugin { + constructor(options?: ConstructorParameters[1]) { + super('TestFrontMatterOutputPlugin', { + globalConfigDir: '.tool', + outputFileName: '', + commands: { + sourceScopes: ['project'], + transformFrontMatter: () => ({description: 'command'}) + }, + subagents: { + sourceScopes: ['project'] + }, + skills: {}, + rules: { + sourceScopes: ['project'] + }, + ...options + }) + } + + async renderCommand(cmd: CommandPrompt, ctx: OutputWriteContext): Promise { + return this.buildCommandContent(cmd, ctx) + } + + renderSubAgent(agent: SubAgentPrompt, ctx: OutputWriteContext): string { + return this.buildSubAgentContent(agent, ctx) + } + + renderSkill(skill: SkillPrompt, ctx: OutputWriteContext): string { + return this.buildSkillMainContent(skill, ctx) + } + + renderRule(rule: RulePrompt, ctx: OutputWriteContext): string { + return this.buildRuleContent(rule, ctx) + } +} + +function createWriteContext(blankLineAfter?: boolean): OutputWriteContext { + const workspaceBase = path.resolve('tmp/frontmatter-workspace') + return { + logger: createLogger('TestFrontMatterOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + pluginOptions: blankLineAfter == null + ? {} + : { + frontMatter: { + blankLineAfter + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => 'workspace' + }, + projects: [] + } + } + } as OutputWriteContext +} + +function createCommandPrompt(): CommandPrompt { + return { + type: PromptKind.Command, + content: 'command content', + length: 15, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'dev/build.mdx', + basePath: path.resolve('tmp/dist/commands'), + getDirectoryName: () => 'build', + getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') + }, + commandPrefix: 'dev', + commandName: 'build', + yamlFrontMatter: { + namingCase: 'kebabCase', + description: 'Build command' + }, + markdownContents: [] + } as CommandPrompt +} + +function createSubAgentPrompt(): SubAgentPrompt { + return { + type: PromptKind.SubAgent, + content: 'subagent content', + length: 16, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'qa/boot.mdx', + basePath: path.resolve('tmp/dist/subagents'), + getDirectoryName: () => 'boot', + getAbsolutePath: () => path.resolve('tmp/dist/subagents/qa/boot.mdx') + }, + agentPrefix: 'qa', + agentName: 'boot', + canonicalName: 'qa-boot', + yamlFrontMatter: { + namingCase: 'kebabCase', + description: 'subagent desc' + }, + markdownContents: [] + } as SubAgentPrompt +} + +function createSkillPrompt(): SkillPrompt { + return { + type: PromptKind.Skill, + content: 'skill content', + length: 13, + filePathKind: FilePathKind.Relative, + skillName: 'ship-it', + dir: { + pathKind: FilePathKind.Relative, + path: 'skills/ship-it', + basePath: path.resolve('tmp/dist/skills'), + getDirectoryName: () => 'ship-it', + getAbsolutePath: () => path.resolve('tmp/dist/skills/ship-it') + }, + yamlFrontMatter: { + namingCase: 'kebabCase', + name: 'ship-it', + description: 'Ship release' + }, + markdownContents: [] + } as SkillPrompt +} + +function createRulePrompt(): RulePrompt { + return { + type: PromptKind.Rule, + content: 'rule content', + length: 12, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'rules/frontend.mdx', + basePath: path.resolve('tmp/dist/rules'), + getDirectoryName: () => 'frontend', + getAbsolutePath: () => path.resolve('tmp/dist/rules/frontend.mdx') + }, + prefix: 'frontend', + ruleName: 'guard', + globs: ['src/**'], + scope: 'project', + yamlFrontMatter: { + namingCase: 'kebabCase', + description: 'Rule desc' + }, + markdownContents: [] + } as RulePrompt +} + +describe('abstract output plugin front matter formatting', () => { + it('adds a blank line after front matter by default for command/rule/subagent/skill outputs', async () => { + const plugin = new TestFrontMatterOutputPlugin() + const ctx = createWriteContext() + + await expect(plugin.renderCommand(createCommandPrompt(), ctx)).resolves.toMatch(/\n---\n\ncommand content$/) + expect(plugin.renderRule(createRulePrompt(), ctx)).toMatch(/\n---\n\nrule content$/) + expect(plugin.renderSubAgent(createSubAgentPrompt(), ctx)).toMatch(/\n---\n\nsubagent content$/) + expect(plugin.renderSkill(createSkillPrompt(), ctx)).toMatch(/\n---\n\nskill content$/) + }) + + it('keeps the derived skill name in raw skill front matter output', () => { + const plugin = new TestFrontMatterOutputPlugin() + const ctx = createWriteContext() + + expect(plugin.renderSkill(createSkillPrompt(), ctx)).toContain('name: ship-it') + }) + + it('removes the extra blank line when frontMatter.blankLineAfter is false', async () => { + const plugin = new TestFrontMatterOutputPlugin() + const ctx = createWriteContext(false) + + await expect(plugin.renderCommand(createCommandPrompt(), ctx)).resolves.toMatch(/\n---\ncommand content$/) + expect(plugin.renderRule(createRulePrompt(), ctx)).toMatch(/\n---\nrule content$/) + expect(plugin.renderSubAgent(createSubAgentPrompt(), ctx)).toMatch(/\n---\nsubagent content$/) + expect(plugin.renderSkill(createSkillPrompt(), ctx)).toMatch(/\n---\nskill content$/) + }) + + it('keeps the blank line when a plugin opts out of the shared switch', async () => { + const plugin = new TestFrontMatterOutputPlugin({ + supportsBlankLineAfterFrontMatter: false + }) + const ctx = createWriteContext(false) + + await expect(plugin.renderCommand(createCommandPrompt(), ctx)).resolves.toMatch(/\n---\n\ncommand content$/) + }) +}) diff --git a/sdk/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts b/sdk/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts new file mode 100644 index 00000000..79fe1470 --- /dev/null +++ b/sdk/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts @@ -0,0 +1,114 @@ +import type {OutputWriteContext, SubAgentPrompt} from './types' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {createLogger, FilePathKind, PromptKind} from '../plugin-core' +import {AbstractOutputPlugin} from './AbstractOutputPlugin' + +class TestSubAgentOutputPlugin extends AbstractOutputPlugin { + constructor(options?: ConstructorParameters[1]) { + super('TestSubAgentOutputPlugin', { + globalConfigDir: '.tool', + outputFileName: '', + subagents: { + sourceScopes: ['project'], + ...options?.subagents + } + }) + } +} + +function createSubAgentPrompt(): SubAgentPrompt { + return { + type: PromptKind.SubAgent, + content: 'subagent content', + length: 16, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'qa/boot.mdx', + basePath: path.resolve('tmp/dist/subagents'), + getDirectoryName: () => 'boot', + getAbsolutePath: () => path.resolve('tmp/dist/subagents/qa/boot.mdx') + }, + agentPrefix: 'qa', + agentName: 'boot', + canonicalName: 'qa-boot', + yamlFrontMatter: { + namingCase: 'kebabCase', + description: 'subagent desc' + }, + markdownContents: [] + } as SubAgentPrompt +} + +function createWriteContext(subAgents: readonly SubAgentPrompt[]): OutputWriteContext { + const workspaceBase = path.resolve('tmp/workspace') + return { + logger: createLogger('TestSubAgentOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => 'workspace' + }, + projects: [{ + name: 'demo', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'demo', + basePath: workspaceBase, + getDirectoryName: () => 'demo', + getAbsolutePath: () => path.join(workspaceBase, 'demo') + } + }] + }, + subAgents + } + } as OutputWriteContext +} + +describe('abstract output plugin subagent naming', () => { + it('uses prefix-agent.ext by default', async () => { + const plugin = new TestSubAgentOutputPlugin() + const declarations = await plugin.declareOutputFiles(createWriteContext([createSubAgentPrompt()])) + const [declaration] = declarations + + expect(declaration?.path.endsWith(path.join('.tool', 'agents', 'qa-boot.md'))).toBe(true) + }) + + it('supports custom linkSymbol and ext for subagent output names', async () => { + const plugin = new TestSubAgentOutputPlugin({ + subagents: { + sourceScopes: ['project'], + linkSymbol: '_', + ext: '.markdown' + } + }) + const declarations = await plugin.declareOutputFiles(createWriteContext([createSubAgentPrompt()])) + const [declaration] = declarations + + expect(declaration?.path.endsWith(path.join('.tool', 'agents', 'qa_boot.markdown'))).toBe(true) + }) + + it('supports subagents.transformFrontMatter declaratively', async () => { + const plugin = new TestSubAgentOutputPlugin({ + subagents: { + sourceScopes: ['project'], + transformFrontMatter: () => ({role: 'qa'}) + } + }) + const declarations = await plugin.declareOutputFiles(createWriteContext([createSubAgentPrompt()])) + const [declaration] = declarations + if (declaration == null) throw new Error('Expected one subagent declaration') + + const content = await plugin.convertContent(declaration, createWriteContext([createSubAgentPrompt()])) + expect(String(content)).toContain('role:') + expect(String(content)).toContain('subagent content') + }) +}) diff --git a/sdk/src/plugins/plugin-core/AbstractOutputPlugin.ts b/sdk/src/plugins/plugin-core/AbstractOutputPlugin.ts new file mode 100644 index 00000000..33874f07 --- /dev/null +++ b/sdk/src/plugins/plugin-core/AbstractOutputPlugin.ts @@ -0,0 +1,1424 @@ +import type {BuildPromptTomlArtifactOptions} from '@truenine/md-compiler' +import type {ToolPresetName} from './GlobalScopeCollector' +import type {RegistryWriter} from './RegistryWriter' +import type { + CommandPrompt, + CommandSeriesPluginOverride, + ILogger, + OutputCleanContext, + OutputCleanupDeclarations, + OutputCleanupPathDeclaration, + OutputCleanupScope, + OutputDeclarationScope, + OutputFileDeclaration, + OutputPlugin, + OutputPluginCapabilities, + OutputPluginContext, + OutputScopeSelection, + OutputScopeTopic, + OutputTopicCapability, + OutputWriteContext, + Path, + Project, + ProjectConfig, + RegistryData, + RegistryOperationResult, + RulePrompt, + RuleScope, + SkillPrompt, + SubAgentPrompt, + WslMirrorFileDeclaration +} from './types' + +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import process from 'node:process' +import {buildPromptTomlArtifact} from '@truenine/md-compiler' +import {buildMarkdownWithFrontMatter, buildMarkdownWithRawFrontMatter} from '@truenine/md-compiler/markdown' +import {buildConfigDiagnostic, diagnosticLines} from '@/diagnostics' +import {getEffectiveHomeDir} from '@/runtime-environment' +import {AbstractPlugin} from './AbstractPlugin' +import {FilePathKind, PluginKind} from './enums' +import {applySubSeriesGlobPrefix, filterByProjectConfig} from './filters' +import {GlobalScopeCollector} from './GlobalScopeCollector' +import {compileRawPromptArtifact} from './PromptArtifactCache' +import {resolveSkillName, resolveSubAgentCanonicalName} from './PromptIdentity' +import {resolveTopicScopes} from './scopePolicy' +import {OUTPUT_SCOPE_TOPICS} from './types' + +interface ScopedSourceConfig { + /** Allowed source scopes for the topic */ + readonly sourceScopes?: readonly OutputDeclarationScope[] + /** Optional source-scope remap before output selection */ + readonly scopeRemap?: Partial> +} + +/** + * Options for building skill front matter + */ +export interface SkillFrontMatterOptions { + readonly includeTools?: boolean + readonly toolFormat?: 'array' | 'string' + readonly additionalFields?: Record +} + +/** + * Options for building rule content + */ +export interface RuleContentOptions { + readonly fileExtension: '.mdc' | '.md' + readonly alwaysApply: boolean + readonly globJoinPattern: ', ' | '|' | string + readonly frontMatterFormatter?: (globs: string) => unknown + readonly additionalFrontMatter?: Record +} + +/** + * Rule output configuration (declarative) + */ +export interface RuleOutputConfig { + /** Rules subdirectory, default 'rules' */ + readonly subDir?: string + /** Link symbol between series and ruleName, default '-' */ + readonly linkSymbol?: string + /** Rule file prefix, default 'rule' */ + readonly prefix?: string + /** Rule file extension, default '.md' */ + readonly ext?: string + /** Custom frontmatter transformer */ + readonly transformFrontMatter?: (rule: RulePrompt) => Record + /** Allowed rule source scopes, default ['project', 'global'] */ + readonly sourceScopes?: readonly OutputDeclarationScope[] +} + +/** + * Command output configuration (declarative) + */ +export interface CommandOutputConfig { + /** Commands subdirectory, default 'commands' */ + readonly subDir?: string + /** Custom command frontmatter transformer */ + readonly transformFrontMatter?: ( + cmd: CommandPrompt, + context: { + readonly sourceFrontMatter?: Record + readonly isRecompiled: boolean + } + ) => Record + /** Allowed command source scopes, default ['project', 'global'] */ + readonly sourceScopes?: readonly OutputDeclarationScope[] + /** Optional source-scope remap before output selection */ + readonly scopeRemap?: Partial> +} + +export type SubAgentArtifactFormat = 'markdown' | 'toml' +export type SubAgentFileNameSource = 'derivedPath' | 'frontMatterName' + +/** + * SubAgent output configuration (declarative) + */ +export interface SubAgentsOutputConfig extends ScopedSourceConfig { + /** SubAgents subdirectory, default 'agents' */ + readonly subDir?: string + /** Whether to include input-derived prefix in output filename, default true */ + readonly includePrefix?: boolean + /** Separator between prefix and agent name, default '-' */ + readonly linkSymbol?: string + /** SubAgent file extension, default '.md' */ + readonly ext?: string + /** Output artifact format, default 'markdown' */ + readonly artifactFormat?: SubAgentArtifactFormat + /** Field name that receives prompt body when artifactFormat='toml' */ + readonly bodyFieldName?: string + /** Source for output file name, default 'derivedPath' */ + readonly fileNameSource?: SubAgentFileNameSource + /** Front matter field remap before artifact emission */ + readonly fieldNameMap?: Readonly> + /** Front matter fields to exclude from artifact emission */ + readonly excludedFrontMatterFields?: readonly string[] + /** Additional fields injected into emitted artifact */ + readonly extraFields?: Readonly> + /** Preferred root-level field order for emitted artifact */ + readonly fieldOrder?: readonly string[] + /** Optional frontmatter transformer */ + readonly transformFrontMatter?: ( + subAgent: SubAgentPrompt, + context: { + readonly sourceFrontMatter?: Record + } + ) => Record +} + +/** + * Skills output configuration (declarative) + */ +export interface SkillsOutputConfig extends ScopedSourceConfig { + /** Skills subdirectory, default 'skills' */ + readonly subDir?: string +} + +/** + * Options for transforming command names in output filenames. + * Used by transformCommandName method to control prefix handling. + */ +export interface CommandNameTransformOptions { + readonly includeSeriesPrefix?: boolean + readonly seriesSeparator?: string +} + +/** + * Options for transforming subagent names in output filenames. + */ +export interface SubAgentNameTransformOptions { + readonly includePrefix?: boolean + readonly linkSymbol?: string + readonly ext?: string +} + +/** + * Cleanup path entries for one scope. + * Relative paths are resolved by scope base: + * - project: project root + * - global: user home + * - xdgConfig: XDG config home (defaults to ~/.config) + */ +export interface CleanupScopePathsConfig { + readonly files?: readonly string[] + readonly dirs?: readonly string[] + readonly globs?: readonly string[] +} + +/** + * Declarative cleanup configuration for output plugins. + */ +export interface OutputCleanupConfig { + readonly delete?: Partial> + readonly protect?: Partial> + readonly excludeScanGlobs?: readonly string[] +} + +/** + * Options for configuring AbstractOutputPlugin subclasses. + */ +export interface AbstractOutputPluginOptions { + globalConfigDir?: string + + outputFileName?: string + + treatWorkspaceRootProjectAsProject?: boolean + + dependsOn?: readonly string[] + + indexignore?: string + + /** Command output configuration (declarative) */ + commands?: CommandOutputConfig + + /** SubAgent output configuration (declarative) */ + subagents?: SubAgentsOutputConfig + + /** Skills output configuration (declarative) */ + skills?: SkillsOutputConfig + + toolPreset?: ToolPresetName + + /** Rule output configuration (declarative) */ + rules?: RuleOutputConfig + + /** Cleanup configuration (declarative) */ + cleanup?: OutputCleanupConfig + + /** Host-home files that should be mirrored into configured WSL instances */ + wslMirrors?: readonly string[] + + /** Explicit output capability matrix for scope override validation */ + capabilities?: OutputPluginCapabilities + + /** Whether this plugin honors the shared blank-line-after-front-matter option */ + supportsBlankLineAfterFrontMatter?: boolean +} + +/** + * Options for combining global content with project content. + */ +export interface CombineOptions { + separator?: string + + skipIfEmpty?: boolean + + position?: 'before' | 'after' +} + +type DeclarativeOutputSource + = | {readonly kind: 'projectRootMemory', readonly content: string} + | {readonly kind: 'projectChildMemory', readonly content: string} + | {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'subAgent', readonly subAgent: SubAgentPrompt} + | {readonly kind: 'skillMain', readonly skill: SkillPrompt} + | {readonly kind: 'skillReference', readonly content: string} + | { + readonly kind: 'skillResource' + readonly content: string + readonly encoding: 'text' | 'base64' + } + | {readonly kind: 'rule', readonly rule: RulePrompt} + | {readonly kind: 'ignoreFile', readonly content: string} + +export abstract class AbstractOutputPlugin extends AbstractPlugin implements OutputPlugin { + readonly declarativeOutput = true as const + + readonly outputCapabilities: OutputPluginCapabilities + + protected readonly globalConfigDir: string + + protected readonly outputFileName: string + + protected readonly treatWorkspaceRootProjectAsProject: boolean + + protected readonly indexignore: string | undefined + + protected readonly commandsConfig: { + readonly subDir: string + readonly transformFrontMatter?: ( + cmd: CommandPrompt, + context: { + readonly sourceFrontMatter?: Record + readonly isRecompiled: boolean + } + ) => Record + readonly sourceScopes: readonly OutputDeclarationScope[] + readonly scopeRemap?: Partial> + } + + protected readonly subAgentsConfig: { + readonly subDir: string + readonly sourceScopes: readonly OutputDeclarationScope[] + readonly includePrefix: boolean + readonly linkSymbol: string + readonly ext: string + readonly artifactFormat: SubAgentArtifactFormat + readonly bodyFieldName?: string + readonly fileNameSource: SubAgentFileNameSource + readonly fieldNameMap?: Readonly> + readonly excludedFrontMatterFields?: readonly string[] + readonly extraFields?: Readonly> + readonly fieldOrder?: readonly string[] + readonly scopeRemap?: Partial> + readonly transformFrontMatter?: ( + subAgent: SubAgentPrompt, + context: { + readonly sourceFrontMatter?: Record + } + ) => Record + } + + protected readonly commandOutputEnabled: boolean + + protected readonly subAgentOutputEnabled: boolean + + protected readonly skillsConfig: { + readonly subDir: string + readonly sourceScopes: readonly OutputDeclarationScope[] + readonly scopeRemap?: Partial> + } + + protected readonly skillOutputEnabled: boolean + + protected readonly toolPreset: ToolPresetName | undefined + + /** Rule output configuration */ + protected readonly rulesConfig: RuleOutputConfig + + protected readonly ruleOutputEnabled: boolean + + protected readonly cleanupConfig: OutputCleanupConfig + + protected readonly wslMirrorPaths: readonly string[] + + protected readonly supportsBlankLineAfterFrontMatter: boolean + + private readonly registryWriterCache: Map> = new Map() + + private warnedDeprecatedSubAgentFileNameSource = false + + protected constructor(name: string, options?: AbstractOutputPluginOptions) { + super(name, PluginKind.Output, options?.dependsOn) + this.globalConfigDir = options?.globalConfigDir ?? '' + this.outputFileName = options?.outputFileName ?? '' + this.treatWorkspaceRootProjectAsProject = options?.treatWorkspaceRootProjectAsProject ?? false + this.indexignore = options?.indexignore + + this.commandOutputEnabled = options?.commands != null + this.commandsConfig = this.createCommandsConfig(options?.commands) + this.subAgentOutputEnabled = options?.subagents != null + this.subAgentsConfig = this.createSubAgentsConfig(options?.subagents) + this.skillOutputEnabled = options?.skills != null + this.skillsConfig = this.createSkillsConfig(options?.skills) + this.toolPreset = options?.toolPreset + + this.ruleOutputEnabled = options?.rules != null + this.rulesConfig = { + ...options?.rules, + sourceScopes: options?.rules?.sourceScopes ?? ['project', 'global'] + } // Initialize rule output config with defaults + this.cleanupConfig = options?.cleanup ?? {} + this.wslMirrorPaths = options?.wslMirrors ?? [] + this.supportsBlankLineAfterFrontMatter = options?.supportsBlankLineAfterFrontMatter ?? true + + this.outputCapabilities = options?.capabilities != null ? this.normalizeCapabilities(options.capabilities) : this.buildInferredCapabilities() + } + + private createCommandsConfig(config?: CommandOutputConfig): AbstractOutputPlugin['commandsConfig'] { + return { + subDir: config?.subDir ?? 'commands', + sourceScopes: config?.sourceScopes ?? ['project', 'global'], + ...config?.scopeRemap != null && {scopeRemap: config.scopeRemap}, + ...config?.transformFrontMatter != null && { + transformFrontMatter: config.transformFrontMatter + } + } + } + + private createSubAgentsConfig(config?: SubAgentsOutputConfig): AbstractOutputPlugin['subAgentsConfig'] { + return { + subDir: config?.subDir ?? 'agents', + sourceScopes: config?.sourceScopes ?? ['project', 'global'], + includePrefix: config?.includePrefix ?? true, + linkSymbol: config?.linkSymbol ?? '-', + ext: config?.ext ?? '.md', + artifactFormat: config?.artifactFormat ?? 'markdown', + fileNameSource: config?.fileNameSource ?? 'derivedPath', + ...config?.bodyFieldName != null && { + bodyFieldName: config.bodyFieldName + }, + ...config?.fieldNameMap != null && { + fieldNameMap: config.fieldNameMap + }, + ...config?.excludedFrontMatterFields != null && { + excludedFrontMatterFields: config.excludedFrontMatterFields + }, + ...config?.extraFields != null && {extraFields: config.extraFields}, + ...config?.fieldOrder != null && {fieldOrder: config.fieldOrder}, + ...config?.scopeRemap != null && {scopeRemap: config.scopeRemap}, + ...config?.transformFrontMatter != null && { + transformFrontMatter: config.transformFrontMatter + } + } + } + + private createSkillsConfig(config?: SkillsOutputConfig): AbstractOutputPlugin['skillsConfig'] { + return { + subDir: config?.subDir ?? 'skills', + sourceScopes: config?.sourceScopes ?? ['project', 'global'], + ...config?.scopeRemap != null && {scopeRemap: config.scopeRemap} + } + } + + private buildInferredCapabilities(): OutputPluginCapabilities { + const capabilities: OutputPluginCapabilities = {} + + if (this.outputFileName.length > 0) { + capabilities.prompt = { + scopes: ['project', 'global'], + singleScope: false + } + } + + if (this.ruleOutputEnabled) { + capabilities.rules = { + scopes: this.rulesConfig.sourceScopes ?? ['project', 'global'], + singleScope: false + } + } + + if (this.commandOutputEnabled) { + capabilities.commands = { + scopes: this.commandsConfig.sourceScopes, + singleScope: true + } + } + + if (this.subAgentOutputEnabled) { + capabilities.subagents = { + scopes: this.subAgentsConfig.sourceScopes, + singleScope: true + } + } + + if (this.skillOutputEnabled) { + capabilities.skills = { + scopes: this.skillsConfig.sourceScopes, + singleScope: true + } + } + + return capabilities + } + + private normalizeCapabilities(capabilities: OutputPluginCapabilities): OutputPluginCapabilities { + const normalizedCapabilities: OutputPluginCapabilities = {} + for (const topic of OUTPUT_SCOPE_TOPICS) { + const capability = capabilities[topic] + if (capability == null) continue + + const normalized = this.normalizeCapability(capability) + if (normalized != null) normalizedCapabilities[topic] = normalized + } + return normalizedCapabilities + } + + private normalizeCapability(capability: OutputTopicCapability): OutputTopicCapability | undefined { + const uniqueScopes: OutputDeclarationScope[] = [] + for (const scope of capability.scopes) { + if (!uniqueScopes.includes(scope)) uniqueScopes.push(scope) + } + if (uniqueScopes.length === 0) return void 0 + return { + scopes: uniqueScopes, + singleScope: capability.singleScope + } + } + + protected resolvePromptSourceProjectConfig(ctx: OutputPluginContext | OutputWriteContext): ProjectConfig | undefined { + const projects = this.getConcreteProjects(ctx) + const promptSource = projects.find(p => p.isPromptSourceProject === true) + return promptSource?.projectConfig ?? projects[0]?.projectConfig + } + + protected getConcreteProjects(ctx: OutputPluginContext | OutputWriteContext): Project[] { + return ctx.collectedOutputContext.workspace.projects.filter(project => project.isWorkspaceRootProject !== true) + } + + protected isProjectPromptOutputTarget(project: Project): boolean { + return project.isPromptSourceProject !== true + } + + protected getProjectOutputProjects(ctx: OutputPluginContext | OutputWriteContext): Project[] { + const projects = [...this.getConcreteProjects(ctx)] + if (!this.treatWorkspaceRootProjectAsProject) return projects + + const workspaceRootProject = this.getWorkspaceRootProject(ctx) + if (workspaceRootProject != null) projects.push(workspaceRootProject) + return projects + } + + protected getProjectPromptOutputProjects(ctx: OutputPluginContext | OutputWriteContext): Project[] { + return this.getProjectOutputProjects(ctx).filter(project => this.isProjectPromptOutputTarget(project)) + } + + protected getWorkspaceRootProject(ctx: OutputPluginContext | OutputWriteContext): Project | undefined { + return ctx.collectedOutputContext.workspace.projects.find(project => project.isWorkspaceRootProject === true) + } + + protected resolveProjectRootDir(ctx: OutputPluginContext | OutputWriteContext, project: Project): string | undefined { + if (project.isWorkspaceRootProject === true) { + return this.resolveDirectoryPath(ctx.collectedOutputContext.workspace.directory) + } + + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) return void 0 + return this.resolveDirectoryPath(projectDir) + } + + protected resolveProjectConfigDir(ctx: OutputPluginContext | OutputWriteContext, project: Project): string | undefined { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) return void 0 + if (this.globalConfigDir.length === 0) return projectRootDir + return path.join(projectRootDir, this.globalConfigDir) + } + + protected isRelativePath(p: Path): boolean { + return p.pathKind === FilePathKind.Relative + } + + protected toRelativePath(p: Path): string { + return p.path + } + + protected resolveFullPath(targetPath: Path, outputFileName?: string): string { + const dirPath = this.resolveDirectoryPath(targetPath) + + const fileName = outputFileName ?? this.outputFileName // Append the output file name if provided or if default is set + if (fileName) return path.join(dirPath, fileName) + return dirPath + } + + protected resolveDirectoryPath(targetPath: Path): string { + if (targetPath.pathKind === FilePathKind.Absolute) return targetPath.path + if ('basePath' in targetPath) { + return path.resolve(targetPath.basePath as string, targetPath.path) + } + return path.resolve(process.cwd(), targetPath.path) + } + + protected getWorkspaceConfigDir(ctx: OutputWriteContext): string { + const workspaceDir = this.resolveDirectoryPath(ctx.collectedOutputContext.workspace.directory) + return path.join(workspaceDir, this.globalConfigDir) + } + + protected createRelativePath(pathStr: string, basePath: string, dirNameFn: () => string): string { + void dirNameFn + return path.join(basePath, pathStr) + } + + protected createFileRelativePath(dir: string, fileName: string): string { + return path.join(dir, fileName) + } + + protected getGlobalConfigDir(): string { + return path.join(this.getHomeDir(), this.globalConfigDir) + } + + protected getXdgConfigHomeDir(): string { + const xdgConfigHome = process.env['XDG_CONFIG_HOME'] + if (typeof xdgConfigHome === 'string' && xdgConfigHome.trim().length > 0) { + return xdgConfigHome + } + return path.join(this.getHomeDir(), '.config') + } + + protected getHomeDir(): string { + return getEffectiveHomeDir() + } + + protected joinPath(...segments: string[]): string { + return path.join(...segments) + } + + protected resolvePath(...segments: string[]): string { + return path.resolve(...segments) + } + + protected dirname(p: string): string { + return path.dirname(p) + } + + protected buildProjectPromptCleanupTargets(ctx: OutputCleanContext, fileName: string = this.outputFileName): readonly OutputCleanupPathDeclaration[] { + if (fileName.length === 0) return [] + + const declarations: OutputCleanupPathDeclaration[] = [] + const seenPaths = new Set() + + const pushCleanupFile = (targetPath: string, label: string): void => { + if (seenPaths.has(targetPath)) return + seenPaths.add(targetPath) + declarations.push({ + path: targetPath, + kind: 'file', + scope: 'project', + label + }) + } + + for (const project of this.getProjectPromptOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + + // Add glob pattern to match all files with the given name in the project directory + // This ensures files in subdirectories not explicitly tracked as childMemoryPrompts are also cleaned up + declarations.push({ + path: this.resolvePath(projectRootDir, '**', fileName), + kind: 'glob', + scope: 'project', + label: 'delete.project.glob' + }) + + pushCleanupFile(this.resolvePath(projectRootDir, fileName), 'delete.project') + + if (project.childMemoryPrompts == null) continue + for (const child of project.childMemoryPrompts) { + pushCleanupFile(this.resolveFullPath(child.dir, fileName), 'delete.project.child') + } + } + + return declarations + } + + protected basename(p: string, ext?: string): string { + return path.basename(p, ext) + } + + protected getIgnoreOutputPath(): string | undefined { + if (this.indexignore == null) return void 0 + return this.indexignore + } + + private resolveCleanupScopeBasePaths(scope: OutputCleanupScope, ctx: OutputCleanContext): readonly string[] { + if (scope === 'global') return [this.getHomeDir()] + if (scope === 'xdgConfig') return [this.getXdgConfigHomeDir()] + + const projectBasePaths: string[] = [] + for (const project of this.getProjectOutputProjects(ctx)) { + const projectBasePath = this.resolveProjectRootDir(ctx, project) + if (projectBasePath == null) continue + projectBasePaths.push(projectBasePath) + } + return projectBasePaths + } + + private resolveCleanupDeclaredPath(basePath: string, declaredPath: string): string { + if (path.isAbsolute(declaredPath)) return path.resolve(declaredPath) + if (declaredPath === '~') return this.getHomeDir() + if (declaredPath.startsWith('~/') || declaredPath.startsWith('~\\')) { + return path.resolve(this.getHomeDir(), declaredPath.slice(2)) + } + return path.resolve(basePath, declaredPath) + } + + private normalizeGlobPattern(rawPattern: string): string { + return rawPattern.replaceAll('\\', '/') + } + + private buildCleanupTargetsFromScopeConfig( + scopeConfig: Partial> | undefined, + kind: 'delete' | 'protect', + ctx: OutputCleanContext + ): readonly OutputCleanupPathDeclaration[] { + if (scopeConfig == null) return [] + + const declarations: OutputCleanupPathDeclaration[] = [] + const scopes: readonly OutputCleanupScope[] = ['project', 'global', 'xdgConfig'] + + const pushTargets = (scope: OutputCleanupScope, targetKind: 'file' | 'directory' | 'glob', entries: readonly string[] | undefined): void => { + if (entries == null || entries.length === 0) return + const basePaths = this.resolveCleanupScopeBasePaths(scope, ctx) + + for (const entry of entries) { + for (const basePath of basePaths) { + const resolved = path.isAbsolute(entry) ? path.resolve(entry) : this.resolveCleanupDeclaredPath(basePath, entry) + + declarations.push({ + path: targetKind === 'glob' ? this.normalizeGlobPattern(resolved) : resolved, + kind: targetKind, + scope, + label: `${kind}.${scope}` + }) + } + } + } + + for (const scope of scopes) { + const entries = scopeConfig[scope] + if (entries == null) continue + pushTargets(scope, 'file', entries.files) + pushTargets(scope, 'directory', entries.dirs) + pushTargets(scope, 'glob', entries.globs) + } + + return declarations + } + + protected resolveFrontMatterBlankLineAfter(ctx?: OutputPluginContext): boolean { + if (!this.supportsBlankLineAfterFrontMatter) return true + return ctx?.pluginOptions?.frontMatter?.blankLineAfter ?? true + } + + protected buildMarkdownContent(content: string, frontMatter?: Record, ctx?: OutputPluginContext): string { + return buildMarkdownWithFrontMatter(frontMatter, content, { + blankLineAfter: this.resolveFrontMatterBlankLineAfter(ctx) + }) + } + + protected buildMarkdownContentWithRaw(content: string, frontMatter?: Record, rawFrontMatter?: string, ctx?: OutputPluginContext): string { + if (frontMatter != null && Object.keys(frontMatter).length > 0) { + return this.buildMarkdownContent(content, frontMatter, ctx) + } // If we have parsed front matter, use it + + if (rawFrontMatter != null && rawFrontMatter.length > 0) { + return buildMarkdownWithRawFrontMatter(rawFrontMatter, content, { + blankLineAfter: this.resolveFrontMatterBlankLineAfter(ctx) + }) + } // If we have raw front matter but parsing failed, use raw + + return content // No front matter + } + + protected buildTomlContent(options: BuildPromptTomlArtifactOptions): string { + return buildPromptTomlArtifact(options) + } + + protected extractGlobalMemoryContent(ctx: OutputWriteContext): string | undefined { + return ctx.collectedOutputContext.globalMemory?.content as string | undefined + } + + protected combineGlobalWithContent(globalContent: string | undefined, projectContent: string, options?: CombineOptions): string { + const {separator = '\n\n', skipIfEmpty = true, position = 'before'} = options ?? {} + + if (skipIfEmpty && (globalContent == null || globalContent.trim().length === 0)) { + return projectContent + } // Skip if global content is undefined/null or empty/whitespace when skipIfEmpty is true + + const effectiveGlobalContent = globalContent ?? '' // If global content is null/undefined but skipIfEmpty is false, treat as empty string + + if (position === 'after') { + return `${projectContent}${separator}${effectiveGlobalContent}` + } // Combine based on position + + return `${effectiveGlobalContent}${separator}${projectContent}` // Default: 'before' + } + + protected getSkillName(skill: SkillPrompt): string { + return resolveSkillName(skill) + } + + protected getSubAgentCanonicalName(subAgent: SubAgentPrompt): string { + return resolveSubAgentCanonicalName(subAgent) + } + + protected transformCommandName(cmd: CommandPrompt, options?: CommandNameTransformOptions): string { + const {includeSeriesPrefix = true, seriesSeparator = '-'} = options ?? {} + + if (!includeSeriesPrefix || cmd.commandPrefix == null) { + return `${cmd.commandName}.md` + } // If prefix should not be included or prefix is not present, return just commandName + + return `${cmd.commandPrefix}${seriesSeparator}${cmd.commandName}.md` + } + + protected transformSubAgentName(subAgent: SubAgentPrompt, options?: SubAgentNameTransformOptions): string { + const {fileNameSource} = this.subAgentsConfig + const includePrefix = options?.includePrefix ?? this.subAgentsConfig.includePrefix + const linkSymbol = options?.linkSymbol ?? this.subAgentsConfig.linkSymbol + const ext = options?.ext ?? this.subAgentsConfig.ext + const normalizedExt = ext.startsWith('.') ? ext : `.${ext}` + if (fileNameSource === 'frontMatterName') { + this.warnDeprecatedSubAgentFileNameSource() + } + + const hasPrefix = includePrefix && subAgent.agentPrefix != null && subAgent.agentPrefix.length > 0 + if (!hasPrefix) return `${subAgent.agentName}${normalizedExt}` + return `${subAgent.agentPrefix}${linkSymbol}${subAgent.agentName}${normalizedExt}` + } + + protected normalizeOutputFileStem(value: string): string { + const sanitizedCharacters = Array.from(value.trim(), character => { + const codePoint = character.codePointAt(0) ?? 0 + if (codePoint <= 31 || '<>:"/\\|?*'.includes(character)) return '-' + return character + }) + let normalized = sanitizedCharacters.join('') + + while (normalized.endsWith('.') || normalized.endsWith(' ')) { + normalized = normalized.slice(0, -1) + } + + if (normalized.length === 0) { + throw new Error(`Cannot derive a valid output file name from "${value}"`) + } + + return normalized + } + + private warnDeprecatedSubAgentFileNameSource(): void { + if (this.warnedDeprecatedSubAgentFileNameSource) return + this.warnedDeprecatedSubAgentFileNameSource = true + + this.log.warn( + buildConfigDiagnostic({ + code: 'SUBAGENT_FRONTMATTER_NAME_SOURCE_DEPRECATED', + title: 'Sub-agent fileNameSource="frontMatterName" now resolves from derived names', + reason: diagnosticLines( + `The ${this.name} plugin no longer reads authored sub-agent front matter names.`, + 'tnmsc now derives sub-agent names from the sub-agent path.' + ), + exactFix: diagnosticLines( + 'Remove authored `name` fields from sub-agent sources.', + 'Keep using `fileNameSource="frontMatterName"` only as a temporary alias for the derived-path naming behavior.' + ), + details: { + plugin: this.name + } + }) + ) + } + + protected appendSubAgentDeclarations( + declarations: OutputFileDeclaration[], + basePath: string, + scope: OutputDeclarationScope, + scopedSubAgents: readonly SubAgentPrompt[] + ): void { + const seenPaths = new Map() + + for (const subAgent of scopedSubAgents) { + const fileName = this.transformSubAgentName(subAgent) + const targetPath = path.join(basePath, this.subAgentsConfig.subDir, fileName) + const existingAgentName = seenPaths.get(targetPath) + + if (existingAgentName != null) { + throw new Error( + `Sub-agent output collision in ${this.name}: "${this.getSubAgentCanonicalName(subAgent)}" and "${existingAgentName}" both resolve to ${targetPath}` + ) + } + + seenPaths.set(targetPath, this.getSubAgentCanonicalName(subAgent)) + declarations.push({ + path: targetPath, + scope, + source: {kind: 'subAgent', subAgent} + }) + } + } + + protected appendCommandDeclarations( + declarations: OutputFileDeclaration[], + basePath: string, + scope: OutputDeclarationScope, + commands: readonly CommandPrompt[], + transformOptions: CommandNameTransformOptions + ): void { + for (const cmd of commands) { + const fileName = this.transformCommandName(cmd, transformOptions) + declarations.push({ + path: path.join(basePath, this.commandsConfig.subDir, fileName), + scope, + source: {kind: 'command', command: cmd} + }) + } + } + + protected appendSkillDeclarations( + declarations: OutputFileDeclaration[], + basePath: string, + scope: OutputDeclarationScope, + scopedSkills: readonly SkillPrompt[] + ): void { + for (const skill of scopedSkills) { + const skillName = this.getSkillName(skill) + const skillDir = path.join(basePath, this.skillsConfig.subDir, skillName) + + declarations.push({ + path: path.join(skillDir, 'SKILL.md'), + scope, + source: {kind: 'skillMain', skill} + }) + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, childDoc.dir.path.replace(/\.mdx$/, '.md')), + scope, + source: { + kind: 'skillReference', + content: childDoc.content as string + } + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope, + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } + }) + } + } + } + } + + protected appendRuleDeclarations(declarations: OutputFileDeclaration[], basePath: string, scope: OutputDeclarationScope, rules: readonly RulePrompt[]): void { + const rulesDir = path.join(basePath, this.rulesConfig.subDir ?? 'rules') + + for (const rule of rules) { + declarations.push({ + path: path.join(rulesDir, this.buildRuleFileName(rule)), + scope, + source: {kind: 'rule', rule} + }) + } + } + + protected buildSubAgentTomlContent(agent: SubAgentPrompt, frontMatter: Record | undefined): string { + const {bodyFieldName} = this.subAgentsConfig + if (bodyFieldName == null || bodyFieldName.length === 0) { + throw new Error(`subagents.bodyFieldName is required when artifactFormat="toml" for ${this.name}`) + } + + return this.buildTomlContent({ + content: agent.content, + bodyFieldName, + ...frontMatter != null && {frontMatter}, + ...this.subAgentsConfig.fieldNameMap != null && { + fieldNameMap: this.subAgentsConfig.fieldNameMap + }, + ...this.subAgentsConfig.excludedFrontMatterFields != null && { + excludedKeys: this.subAgentsConfig.excludedFrontMatterFields + }, + ...this.subAgentsConfig.extraFields != null && { + extraFields: this.subAgentsConfig.extraFields + }, + ...this.subAgentsConfig.fieldOrder != null && { + fieldOrder: this.subAgentsConfig.fieldOrder + } + }) + } + + protected getCommandSeriesOptions(ctx: OutputWriteContext): CommandSeriesPluginOverride { + const globalOptions = ctx.pluginOptions?.commandSeriesOptions + const pluginOverride = globalOptions?.pluginOverrides?.[this.name] + + const includeSeriesPrefix = pluginOverride?.includeSeriesPrefix ?? globalOptions?.includeSeriesPrefix // Only include properties that have defined values to satisfy exactOptionalPropertyTypes // Plugin-specific overrides take precedence over global settings + const seriesSeparator = pluginOverride?.seriesSeparator + + if (includeSeriesPrefix != null && seriesSeparator != null) { + return {includeSeriesPrefix, seriesSeparator} + } // Build result object conditionally to avoid assigning undefined to readonly properties + if (includeSeriesPrefix != null) return {includeSeriesPrefix} + if (seriesSeparator != null) return {seriesSeparator} + return {} + } + + protected getTransformOptionsFromContext(ctx: OutputWriteContext, additionalOptions?: CommandNameTransformOptions): CommandNameTransformOptions { + const seriesOptions = this.getCommandSeriesOptions(ctx) + + const includeSeriesPrefix = seriesOptions.includeSeriesPrefix ?? additionalOptions?.includeSeriesPrefix // Only include properties that have defined values to satisfy exactOptionalPropertyTypes // Merge: additionalOptions (plugin defaults) <- seriesOptions (config overrides) + const seriesSeparator = seriesOptions.seriesSeparator ?? additionalOptions?.seriesSeparator + + if (includeSeriesPrefix != null && seriesSeparator != null) { + return {includeSeriesPrefix, seriesSeparator} + } // Build result object conditionally to avoid assigning undefined to readonly properties + if (includeSeriesPrefix != null) return {includeSeriesPrefix} + if (seriesSeparator != null) return {seriesSeparator} + return {} + } + + protected shouldSkipDueToPlugin(ctx: OutputWriteContext, precedingPluginName: string): boolean { + const registeredPlugins = ctx.registeredPluginNames + if (registeredPlugins == null) return false + return registeredPlugins.includes(precedingPluginName) + } + + protected getRegistryWriter>(WriterClass: new (logger: ILogger) => T): T { + const cacheKey = WriterClass.name + + const cached = this.registryWriterCache.get(cacheKey) // Check cache first + if (cached != null) return cached as T + + const writer = new WriterClass(this.log) // Create new instance and cache it + this.registryWriterCache.set(cacheKey, writer as RegistryWriter) + return writer + } + + protected async registerInRegistry( + writer: RegistryWriter, + entries: readonly TEntry[], + ctx: OutputWriteContext + ): Promise { + return writer.register(entries, ctx.dryRun) + } + + protected normalizeRuleScope(rule: RulePrompt): RuleScope { + return rule.scope ?? 'project' + } + + protected normalizeSourceScope(scope: RuleScope | undefined): OutputDeclarationScope { + if (scope === 'global' || scope === 'project') return scope + return 'project' + } + + protected remapDeclarationScope( + scope: OutputDeclarationScope, + remap?: Partial> + ): OutputDeclarationScope { + return remap?.[scope] ?? scope + } + + protected resolveCommandSourceScope(cmd: CommandPrompt): OutputDeclarationScope { + if (cmd.globalOnly === true) return 'global' + const scope = (cmd.yamlFrontMatter as {scope?: RuleScope} | undefined)?.scope + return this.remapDeclarationScope(this.normalizeSourceScope(scope), this.commandsConfig.scopeRemap) + } + + protected resolveSubAgentSourceScope(subAgent: SubAgentPrompt): OutputDeclarationScope { + const scope = (subAgent.yamlFrontMatter as {scope?: RuleScope} | undefined)?.scope + return this.remapDeclarationScope(this.normalizeSourceScope(scope), this.subAgentsConfig.scopeRemap) + } + + protected resolveSkillSourceScope(skill: SkillPrompt): OutputDeclarationScope { + const scope = (skill.yamlFrontMatter as {scope?: RuleScope} | undefined)?.scope + return this.remapDeclarationScope(this.normalizeSourceScope(scope), this.skillsConfig.scopeRemap) + } + + protected selectSingleScopeItems( + items: readonly T[], + sourceScopes: readonly OutputDeclarationScope[], + resolveScope: (item: T) => OutputDeclarationScope, + requestedScopes?: OutputScopeSelection + ): { + readonly selectedScope?: OutputDeclarationScope + readonly items: readonly T[] + } { + if (items.length === 0) return {items: []} + + const availableScopes = [...new Set(items.map(resolveScope))] + const selectedScopes = resolveTopicScopes({ + requestedScopes, + defaultScopes: sourceScopes, + supportedScopes: sourceScopes, + singleScope: true, + availableScopes + }) + const [selectedScope] = selectedScopes + if (selectedScope == null) return {items: []} + + return { + selectedScope, + items: items.filter(item => resolveScope(item) === selectedScope) + } + } + + protected selectRuleScopes(ctx: OutputWriteContext, rules: readonly RulePrompt[]): readonly OutputDeclarationScope[] { + const availableScopes = [...new Set(rules.map(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule))))] + return resolveTopicScopes({ + requestedScopes: this.getTopicScopeOverride(ctx, 'rules'), + defaultScopes: this.rulesConfig.sourceScopes ?? ['project', 'global'], + supportedScopes: this.rulesConfig.sourceScopes ?? ['project', 'global'], + singleScope: false, + availableScopes + }).filter(scope => availableScopes.includes(scope)) + } + + protected selectPromptScopes( + ctx: OutputWriteContext, + supportedScopes: readonly OutputDeclarationScope[] = ['project', 'global'], + defaultScopes: readonly OutputDeclarationScope[] = supportedScopes + ): readonly OutputDeclarationScope[] { + return resolveTopicScopes({ + requestedScopes: this.getTopicScopeOverride(ctx, 'prompt'), + defaultScopes, + supportedScopes, + singleScope: false + }) + } + + protected getTopicScopeOverride(ctx: OutputPluginContext | OutputWriteContext, topic: OutputScopeTopic): OutputScopeSelection | undefined { + return ctx.pluginOptions?.outputScopes?.plugins?.[this.name]?.[topic] + } + + protected buildSkillFrontMatter(skill: SkillPrompt, options?: SkillFrontMatterOptions): Record { + const fm = skill.yamlFrontMatter + const result: Record = { + name: this.getSkillName(skill), + description: fm.description + } + + if ('displayName' in fm && fm.displayName != null) { + // Conditionally add optional fields + result['displayName'] = fm.displayName + } + if ('keywords' in fm && fm.keywords != null && fm.keywords.length > 0) { + result['keywords'] = fm.keywords + } + if ('author' in fm && fm.author != null) result['author'] = fm.author + if ('version' in fm && fm.version != null) result['version'] = fm.version + + const includeTools = options?.includeTools ?? true // Handle tools based on options + if (includeTools && 'allowTools' in fm && fm.allowTools != null && fm.allowTools.length > 0) { + const toolFormat = options?.toolFormat ?? 'array' + result['allowTools'] = toolFormat === 'string' ? fm.allowTools.join(',') : fm.allowTools + } + + if (options?.additionalFields != null) { + // Add any additional custom fields + Object.assign(result, options.additionalFields) + } + + return result + } + + protected buildRuleContent(rule: RulePrompt, ctx?: OutputPluginContext): string { + const fmData = this.rulesConfig.transformFrontMatter ? this.rulesConfig.transformFrontMatter(rule) : {globs: rule.globs.join(', ')} + + const sanitizedFmData = fmData == null || Object.keys(fmData).length === 0 ? void 0 : fmData + + return this.buildMarkdownContent(rule.content, sanitizedFmData, ctx) + } + + protected buildRuleFileName(rule: RulePrompt): string { + const prefix = `${this.rulesConfig.prefix ?? 'rule'}${this.rulesConfig.linkSymbol ?? '-'}` + const fileName = `${prefix}${rule.prefix}${this.rulesConfig.linkSymbol ?? '-'}${rule.ruleName}${this.rulesConfig.ext ?? '.md'}` + return fileName + } + + async declareOutputFiles(ctx: OutputWriteContext): Promise { + return this.buildDefaultOutputDeclarations(ctx) + } + + async declareCleanupPaths(ctx: OutputCleanContext): Promise { + const cleanupDelete = this.buildCleanupTargetsFromScopeConfig(this.cleanupConfig.delete, 'delete', ctx) + const cleanupProtect = this.buildCleanupTargetsFromScopeConfig(this.cleanupConfig.protect, 'protect', ctx) + const {excludeScanGlobs} = this.cleanupConfig + + if (cleanupDelete.length === 0 && cleanupProtect.length === 0 && (excludeScanGlobs == null || excludeScanGlobs.length === 0)) { + return {} + } + + return { + ...cleanupDelete.length > 0 && {delete: cleanupDelete}, + ...cleanupProtect.length > 0 && {protect: cleanupProtect}, + ...excludeScanGlobs != null && excludeScanGlobs.length > 0 && {excludeScanGlobs} + } + } + + async declareWslMirrorFiles(ctx: OutputWriteContext): Promise { + void ctx + return this.wslMirrorPaths.map(sourcePath => ({sourcePath})) + } + + async convertContent(declaration: OutputFileDeclaration, ctx: OutputWriteContext): Promise { + const source = declaration.source as DeclarativeOutputSource + + switch (source.kind) { + case 'projectRootMemory': + case 'projectChildMemory': + case 'globalMemory': + case 'skillReference': + case 'ignoreFile': + return source.content + case 'command': + return this.buildCommandContent(source.command, ctx) + case 'subAgent': + return this.buildSubAgentContent(source.subAgent, ctx) + case 'skillMain': + return this.buildSkillMainContent(source.skill, ctx) + case 'skillResource': + return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + case 'rule': + return this.buildRuleContent(source.rule, ctx) + default: + throw new Error(`Unsupported declaration source for plugin ${this.name}`) + } + } + + protected async buildDefaultOutputDeclarations(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {globalMemory, commands, subAgents, skills, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const transformOptions = this.getTransformOptionsFromContext(ctx) + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null ? void 0 : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + const selectedCommands + = this.commandOutputEnabled && commands != null + ? this.selectSingleScopeItems( + commands, + this.commandsConfig.sourceScopes, + cmd => this.resolveCommandSourceScope(cmd), + this.getTopicScopeOverride(ctx, 'commands') + ) + : {items: [] as readonly CommandPrompt[]} + + const selectedSubAgents + = this.subAgentOutputEnabled && subAgents != null + ? this.selectSingleScopeItems( + subAgents, + this.subAgentsConfig.sourceScopes, + subAgent => this.resolveSubAgentSourceScope(subAgent), + this.getTopicScopeOverride(ctx, 'subagents') + ) + : {items: [] as readonly SubAgentPrompt[]} + + const selectedSkills + = this.skillOutputEnabled && skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + + const allRules = rules ?? [] + const activeRuleScopes = this.ruleOutputEnabled && allRules.length > 0 ? new Set(this.selectRuleScopes(ctx, allRules)) : new Set() + const activePromptScopes = new Set(this.selectPromptScopes(ctx, this.outputCapabilities.prompt?.scopes ?? ['project', 'global'])) + + const rulesByScope: Record = { + project: [], + global: [] + } + for (const rule of allRules) { + const ruleScope = this.normalizeSourceScope(this.normalizeRuleScope(rule)) + rulesByScope[ruleScope].push(rule) + } + + for (const project of this.getProjectOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + const basePath = this.resolveProjectConfigDir(ctx, project) + if (projectRootDir == null || basePath == null) continue + + if (this.outputFileName.length > 0 && activePromptScopes.has('project') && this.isProjectPromptOutputTarget(project)) { + if (project.rootMemoryPrompt != null) { + declarations.push({ + path: path.join(projectRootDir, this.outputFileName), + scope: 'project', + source: { + kind: 'projectRootMemory', + content: project.rootMemoryPrompt.content as string + } + }) + } + + if (project.childMemoryPrompts != null) { + for (const child of project.childMemoryPrompts) { + declarations.push({ + path: this.resolveFullPath(child.dir), + scope: 'project', + source: { + kind: 'projectChildMemory', + content: child.content as string + } + }) + } + } + } + + const {projectConfig} = project + + if (selectedCommands.selectedScope === 'project' && selectedCommands.items.length > 0) { + const filteredCommands = filterByProjectConfig(selectedCommands.items, projectConfig, 'commands') + this.appendCommandDeclarations(declarations, basePath, 'project', filteredCommands, transformOptions) + } + + if (selectedSubAgents.selectedScope === 'project' && selectedSubAgents.items.length > 0) { + const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, projectConfig, 'subAgents') + this.appendSubAgentDeclarations(declarations, basePath, 'project', filteredSubAgents) + } + + if (selectedSkills.selectedScope === 'project' && selectedSkills.items.length > 0) { + const filteredSkills = filterByProjectConfig(selectedSkills.items, projectConfig, 'skills') + this.appendSkillDeclarations(declarations, basePath, 'project', filteredSkills) + } + + if (activeRuleScopes.has('project')) { + const projectRules = applySubSeriesGlobPrefix(filterByProjectConfig(rulesByScope.project, projectConfig, 'rules'), projectConfig) + this.appendRuleDeclarations(declarations, basePath, 'project', projectRules) + } + + if ( + ignoreOutputPath != null + && ignoreFile != null + && project.isWorkspaceRootProject !== true + && project.isPromptSourceProject !== true + && project.dirFromWorkspacePath != null + ) { + declarations.push({ + path: path.join(project.dirFromWorkspacePath.basePath, project.dirFromWorkspacePath.path, ignoreOutputPath), + scope: 'project', + source: {kind: 'ignoreFile', content: ignoreFile.content} + }) + } + } + + const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) + + if (selectedCommands.selectedScope === 'global' && selectedCommands.items.length > 0) { + const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') + const basePath = this.getGlobalConfigDir() + this.appendCommandDeclarations(declarations, basePath, 'global', filteredCommands, transformOptions) + } + + if (selectedSubAgents.selectedScope === 'global' && selectedSubAgents.items.length > 0) { + const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, promptSourceProjectConfig, 'subAgents') + const basePath = this.getGlobalConfigDir() + this.appendSubAgentDeclarations(declarations, basePath, 'global', filteredSubAgents) + } + + if (selectedSkills.selectedScope === 'global' && selectedSkills.items.length > 0) { + const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') + const basePath = this.getGlobalConfigDir() + this.appendSkillDeclarations(declarations, basePath, 'global', filteredSkills) + } + + for (const ruleScope of ['global'] as const) { + if (!activeRuleScopes.has(ruleScope)) continue + const basePath = this.getGlobalConfigDir() + const filteredRules = applySubSeriesGlobPrefix( + filterByProjectConfig(rulesByScope[ruleScope], promptSourceProjectConfig, 'rules'), + promptSourceProjectConfig + ) + this.appendRuleDeclarations(declarations, basePath, ruleScope, filteredRules) + } + + if (globalMemory != null && this.outputFileName.length > 0 && activePromptScopes.has('global')) { + declarations.push({ + path: path.join(this.getGlobalConfigDir(), this.outputFileName), + scope: 'global', + source: { + kind: 'globalMemory', + content: globalMemory.content as string + } + }) + } + + return declarations + } + + protected async buildCommandContent(cmd: CommandPrompt, ctx?: OutputPluginContext): Promise { + let compiledContent = cmd.content + let compiledFrontMatter = cmd.yamlFrontMatter + let useRecompiledFrontMatter = false + + if (cmd.rawMdxContent != null && this.toolPreset != null) { + this.log.debug('recompiling command with tool preset', { + file: cmd.dir.getAbsolutePath(), + toolPreset: this.toolPreset, + hasRawContent: true + }) + const scopeCollector = new GlobalScopeCollector({ + toolPreset: this.toolPreset + }) + const globalScope = scopeCollector.collect() + const result = await compileRawPromptArtifact({ + filePath: cmd.dir.getAbsolutePath(), + globalScope, + rawMdx: cmd.rawMdxContent + }) + compiledContent = result.content + compiledFrontMatter = result.metadata as typeof cmd.yamlFrontMatter + useRecompiledFrontMatter = true + } + + const commandFrontMatterTransformer = this.commandsConfig.transformFrontMatter + if (commandFrontMatterTransformer == null) { + throw new Error(`commands.transformFrontMatter is required for command output plugin: ${this.name}`) + } + + const transformedFrontMatter = commandFrontMatterTransformer(cmd, { + isRecompiled: useRecompiledFrontMatter, + ...compiledFrontMatter != null && { + sourceFrontMatter: compiledFrontMatter as Record + } + }) + + return this.buildMarkdownContent(compiledContent, transformedFrontMatter, ctx) + } + + protected buildSubAgentContent(agent: SubAgentPrompt, ctx?: OutputPluginContext): string { + const subAgentFrontMatterTransformer = this.subAgentsConfig.transformFrontMatter + const transformedFrontMatter = subAgentFrontMatterTransformer?.(agent, { + ...agent.yamlFrontMatter != null && { + sourceFrontMatter: agent.yamlFrontMatter as Record + } + }) + + if (this.subAgentsConfig.artifactFormat === 'toml') { + const sourceFrontMatter = transformedFrontMatter ?? agent.yamlFrontMatter + return this.buildSubAgentTomlContent(agent, sourceFrontMatter) + } + + if (transformedFrontMatter != null) { + return this.buildMarkdownContent(agent.content, transformedFrontMatter, ctx) + } + + return this.buildMarkdownContentWithRaw(agent.content, agent.yamlFrontMatter, agent.rawFrontMatter, ctx) + } + + protected buildSkillMainContent(skill: SkillPrompt, ctx?: OutputPluginContext): string { + return this.buildMarkdownContentWithRaw(skill.content as string, skill.yamlFrontMatter, skill.rawFrontMatter, ctx) + } +} diff --git a/sdk/src/plugins/plugin-core/AbstractPlugin.ts b/sdk/src/plugins/plugin-core/AbstractPlugin.ts new file mode 100644 index 00000000..24e2e323 --- /dev/null +++ b/sdk/src/plugins/plugin-core/AbstractPlugin.ts @@ -0,0 +1,26 @@ +import type {ILogger} from '@truenine/logger' +import type {PluginKind} from './enums' +import type {Plugin} from './plugin' + +import {createLogger} from '@truenine/logger' + +export abstract class AbstractPlugin implements Plugin { + readonly type: T + + readonly name: string + + private _log?: ILogger + + get log(): ILogger { + this._log ??= createLogger(this.name) + return this._log + } + + readonly dependsOn?: readonly string[] + + protected constructor(name: string, type: T, dependsOn?: readonly string[]) { + this.name = name + this.type = type + if (dependsOn != null) this.dependsOn = dependsOn + } +} diff --git a/sdk/src/plugins/plugin-core/AindexConfigDefaults.ts b/sdk/src/plugins/plugin-core/AindexConfigDefaults.ts new file mode 100644 index 00000000..8a6e7480 --- /dev/null +++ b/sdk/src/plugins/plugin-core/AindexConfigDefaults.ts @@ -0,0 +1,123 @@ +export interface AindexDirPairLike { + readonly src: string + readonly dist: string +} + +export const AINDEX_DEFAULT_DIR_NAME = 'aindex' + +export const AINDEX_PROJECT_SERIES_NAMES = ['app', 'ext', 'arch', 'softwares'] as const + +export type AindexProjectSeriesName = (typeof AINDEX_PROJECT_SERIES_NAMES)[number] + +export const AINDEX_CONFIG_DIRECTORY_PAIR_KEYS = [ + 'skills', + 'commands', + 'subAgents', + 'rules', + ...AINDEX_PROJECT_SERIES_NAMES +] as const + +export const AINDEX_CONFIG_FILE_PAIR_KEYS = [ + 'globalPrompt', + 'workspacePrompt' +] as const + +export const AINDEX_CONFIG_PAIR_KEYS = [ + 'skills', + 'commands', + 'subAgents', + 'rules', + 'globalPrompt', + 'workspacePrompt', + ...AINDEX_PROJECT_SERIES_NAMES +] as const + +export type AindexConfigPairKey = (typeof AINDEX_CONFIG_PAIR_KEYS)[number] +export type AindexConfigDirectoryPairKey = (typeof AINDEX_CONFIG_DIRECTORY_PAIR_KEYS)[number] +export type AindexConfigFilePairKey = (typeof AINDEX_CONFIG_FILE_PAIR_KEYS)[number] +export type AindexConfigKeyPath = `aindex.${AindexConfigPairKey}.src` | `aindex.${AindexConfigPairKey}.dist` + +export const AINDEX_PROMPT_TREE_DIRECTORY_PAIR_KEYS = [ + 'skills', + 'commands', + 'subAgents', + ...AINDEX_PROJECT_SERIES_NAMES +] as const satisfies readonly AindexConfigDirectoryPairKey[] + +export type AindexPromptTreeDirectoryPairKey = (typeof AINDEX_PROMPT_TREE_DIRECTORY_PAIR_KEYS)[number] + +interface MutableAindexDirPair { + src: string + dist: string +} + +export type AindexConfigLike = { + dir: string +} & { + [K in AindexConfigPairKey]: MutableAindexDirPair +} + +export const AINDEX_CONFIG_PAIR_DEFAULTS = { + skills: {src: 'skills', dist: 'dist/skills'}, + commands: {src: 'commands', dist: 'dist/commands'}, + subAgents: {src: 'subagents', dist: 'dist/subagents'}, + rules: {src: 'rules', dist: 'dist/rules'}, + globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, + workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, + app: {src: 'app', dist: 'dist/app'}, + ext: {src: 'ext', dist: 'dist/ext'}, + arch: {src: 'arch', dist: 'dist/arch'}, + softwares: {src: 'softwares', dist: 'dist/softwares'} +} as const satisfies Record + +function buildAindexConfigKeyPaths(): readonly AindexConfigKeyPath[] { + const paths: AindexConfigKeyPath[] = [] + for (const key of AINDEX_CONFIG_PAIR_KEYS) { + paths.push(`aindex.${key}.src`, `aindex.${key}.dist`) + } + return paths +} + +function cloneAindexConfigPairs(): {[K in AindexConfigPairKey]: MutableAindexDirPair} { + return Object.fromEntries( + AINDEX_CONFIG_PAIR_KEYS.map(key => [ + key, + { + ...AINDEX_CONFIG_PAIR_DEFAULTS[key] + } + ]) + ) as {[K in AindexConfigPairKey]: MutableAindexDirPair} +} + +export const AINDEX_CONFIG_KEY_PATHS = buildAindexConfigKeyPaths() + +export function buildDefaultAindexConfig(): AindexConfigLike { + return { + dir: AINDEX_DEFAULT_DIR_NAME, + ...cloneAindexConfigPairs() + } +} + +export function mergeAindexConfig( + base: T, + override?: Partial +): T { + if (override == null) return base + + const mergedPairs = Object.fromEntries( + AINDEX_CONFIG_PAIR_KEYS.map(key => [ + key, + { + ...base[key], + ...override[key] + } + ]) + ) as {[K in AindexConfigPairKey]: T[K]} + + return { + ...base, + ...override, + dir: override.dir ?? base.dir, + ...mergedPairs + } +} diff --git a/sdk/src/plugins/plugin-core/AindexTypes.ts b/sdk/src/plugins/plugin-core/AindexTypes.ts new file mode 100644 index 00000000..599efff5 --- /dev/null +++ b/sdk/src/plugins/plugin-core/AindexTypes.ts @@ -0,0 +1,367 @@ +export { + AINDEX_PROJECT_SERIES_NAMES +} from './AindexConfigDefaults' + +export type { + AindexProjectSeriesName +} from './AindexConfigDefaults' + +/** + * Aindex directory structure types and constants + * Used for directory structure validation and generation + */ + +/** + * File entry in the aindex project + */ +export interface AindexFileEntry { + /** File name (e.g., 'GLOBAL.md') */ + readonly name: string + /** Whether this file is required */ + readonly required: boolean + /** File description */ + readonly description?: string +} + +/** + * Directory entry in the aindex project + */ +export interface AindexDirectoryEntry { + /** Directory name (e.g., 'skills') */ + readonly name: string + /** Whether this directory is required */ + readonly required: boolean + /** Directory description */ + readonly description?: string + /** Nested directories */ + readonly directories?: readonly AindexDirectoryEntry[] + /** Files in this directory */ + readonly files?: readonly AindexFileEntry[] +} + +/** + * Root structure of the aindex project + */ +export interface AindexDirectory { + /** Source directories (before compilation) */ + readonly src: { + readonly skills: AindexDirectoryEntry + readonly commands: AindexDirectoryEntry + readonly agents: AindexDirectoryEntry + readonly rules: AindexDirectoryEntry + readonly globalMemoryFile: AindexFileEntry + readonly workspaceMemoryFile: AindexFileEntry + } + /** Distribution directories (after compilation) */ + readonly dist: { + readonly skills: AindexDirectoryEntry + readonly commands: AindexDirectoryEntry + readonly agents: AindexDirectoryEntry + readonly rules: AindexDirectoryEntry + readonly app: AindexDirectoryEntry + readonly ext: AindexDirectoryEntry + readonly arch: AindexDirectoryEntry + readonly softwares: AindexDirectoryEntry + readonly globalMemoryFile: AindexFileEntry + readonly workspaceMemoryFile: AindexFileEntry + } + /** App directory (project-specific prompts source, standalone at root) */ + readonly app: AindexDirectoryEntry + readonly ext: AindexDirectoryEntry + readonly arch: AindexDirectoryEntry + readonly softwares: AindexDirectoryEntry + /** IDE configuration directories */ + readonly ide: { + readonly idea: AindexDirectoryEntry + readonly ideaCodeStyles: AindexDirectoryEntry + readonly vscode: AindexDirectoryEntry + readonly zed: AindexDirectoryEntry + } + /** IDE configuration files */ + readonly ideFiles: readonly AindexFileEntry[] + /** AI Agent ignore files */ + readonly ignoreFiles: readonly AindexFileEntry[] +} + +/** + * Directory names used in aindex project + */ +export const AINDEX_DIR_NAMES = { + SRC: 'src', + DIST: 'dist', + SKILLS: 'skills', + COMMANDS: 'commands', + AGENTS: 'agents', + RULES: 'rules', + APP: 'app', + EXT: 'ext', + ARCH: 'arch', + SOFTWARES: 'softwares', + IDEA: '.idea', // IDE directories + IDEA_CODE_STYLES: '.idea/codeStyles', + VSCODE: '.vscode', + ZED: '.zed' +} as const + +/** + * File names used in aindex project + */ +export const AINDEX_FILE_NAMES = { + GLOBAL_MEMORY: 'global.mdx', // Global memory + GLOBAL_MEMORY_SRC: 'global.src.mdx', + WORKSPACE_MEMORY: 'workspace.mdx', // Workspace memory + WORKSPACE_MEMORY_SRC: 'workspace.src.mdx', + EDITOR_CONFIG: '.editorconfig', // EditorConfig + IDEA_GITIGNORE: '.idea/.gitignore', // JetBrains IDE + IDEA_PROJECT_XML: '.idea/codeStyles/Project.xml', + IDEA_CODE_STYLE_CONFIG_XML: '.idea/codeStyles/codeStyleConfig.xml', + VSCODE_SETTINGS: '.vscode/settings.json', // VS Code + VSCODE_EXTENSIONS: '.vscode/extensions.json', + ZED_SETTINGS: '.zed/settings.json', + QODER_IGNORE: '.qoderignore', // AI Agent ignore files + CURSOR_IGNORE: '.cursorignore', + WARP_INDEX_IGNORE: '.warpindexignore', + AI_IGNORE: '.aiignore', + CODEIUM_IGNORE: '.codeiumignore' // Windsurf ignore file +} as const + +/** + * Relative paths from aindex project root + */ +export const AINDEX_RELATIVE_PATHS = { + SRC_SKILLS: 'src/skills', // Source paths + SRC_COMMANDS: 'src/commands', + SRC_AGENTS: 'src/agents', + SRC_RULES: 'src/rules', + SRC_GLOBAL_MEMORY: 'global.src.mdx', + SRC_WORKSPACE_MEMORY: 'workspace.src.mdx', + DIST_SKILLS: 'dist/skills', // Distribution paths + DIST_COMMANDS: 'dist/commands', + DIST_AGENTS: 'dist/agents', + DIST_RULES: 'dist/rules', + DIST_APP: 'dist/app', + DIST_EXT: 'dist/ext', + DIST_ARCH: 'dist/arch', + DIST_SOFTWARES: 'dist/softwares', + DIST_GLOBAL_MEMORY: 'dist/global.mdx', + DIST_WORKSPACE_MEMORY: 'dist/workspace.mdx', + APP: 'app', // App source path (standalone at root) + EXT: 'ext', + ARCH: 'arch', + SOFTWARES: 'softwares' +} as const + +/** + * Default aindex directory structure + * Used for validation and generation + */ +export const DEFAULT_AINDEX_STRUCTURE: AindexDirectory = { + src: { + skills: { + name: AINDEX_DIR_NAMES.SKILLS, + required: false, + description: 'Skill source files (.src.mdx)' + }, + commands: { + name: AINDEX_DIR_NAMES.COMMANDS, + required: false, + description: 'Fast command source files (.src.mdx)' + }, + agents: { + name: AINDEX_DIR_NAMES.AGENTS, + required: false, + description: 'Sub-agent source files (.src.mdx)' + }, + rules: { + name: AINDEX_DIR_NAMES.RULES, + required: false, + description: 'Rule source files (.src.mdx)' + }, + globalMemoryFile: { + name: AINDEX_FILE_NAMES.GLOBAL_MEMORY_SRC, + required: false, + description: 'Global memory source file' + }, + workspaceMemoryFile: { + name: AINDEX_FILE_NAMES.WORKSPACE_MEMORY_SRC, + required: false, + description: 'Workspace memory source file' + } + }, + dist: { + skills: { + name: AINDEX_DIR_NAMES.SKILLS, + required: false, + description: 'Compiled skill files (.mdx)' + }, + commands: { + name: AINDEX_DIR_NAMES.COMMANDS, + required: false, + description: 'Compiled fast command files (.mdx)' + }, + agents: { + name: AINDEX_DIR_NAMES.AGENTS, + required: false, + description: 'Compiled sub-agent files (.mdx)' + }, + rules: { + name: AINDEX_DIR_NAMES.RULES, + required: false, + description: 'Compiled rule files (.mdx)' + }, + globalMemoryFile: { + name: AINDEX_FILE_NAMES.GLOBAL_MEMORY, + required: false, + description: 'Compiled global memory file' + }, + workspaceMemoryFile: { + name: AINDEX_FILE_NAMES.WORKSPACE_MEMORY, + required: false, + description: 'Compiled workspace memory file' + }, + app: { + name: AINDEX_DIR_NAMES.APP, + required: false, + description: 'Compiled project-specific prompts' + }, + ext: { + name: AINDEX_DIR_NAMES.EXT, + required: false, + description: 'Compiled extension-specific prompts' + }, + arch: { + name: AINDEX_DIR_NAMES.ARCH, + required: false, + description: 'Compiled architecture-specific prompts' + }, + softwares: { + name: AINDEX_DIR_NAMES.SOFTWARES, + required: false, + description: 'Compiled software-repository prompts' + } + }, + app: { + name: AINDEX_DIR_NAMES.APP, + required: false, + description: 'Project-specific prompts (standalone directory)' + }, + ext: { + name: AINDEX_DIR_NAMES.EXT, + required: false, + description: 'Extension-specific prompts (standalone directory)' + }, + arch: { + name: AINDEX_DIR_NAMES.ARCH, + required: false, + description: 'Architecture-specific prompts (standalone directory)' + }, + softwares: { + name: AINDEX_DIR_NAMES.SOFTWARES, + required: false, + description: 'Software-repository prompts (standalone directory)' + }, + ide: { + idea: { + name: AINDEX_DIR_NAMES.IDEA, + required: false, + description: 'JetBrains IDE configuration directory' + }, + ideaCodeStyles: { + name: AINDEX_DIR_NAMES.IDEA_CODE_STYLES, + required: false, + description: 'JetBrains IDE code styles directory' + }, + vscode: { + name: AINDEX_DIR_NAMES.VSCODE, + required: false, + description: 'VS Code configuration directory' + }, + zed: { + name: AINDEX_DIR_NAMES.ZED, + required: false, + description: 'Zed configuration directory' + } + }, + ideFiles: [ + { + name: AINDEX_FILE_NAMES.EDITOR_CONFIG, + required: false, + description: 'EditorConfig file' + }, + { + name: AINDEX_FILE_NAMES.IDEA_GITIGNORE, + required: false, + description: 'JetBrains IDE .gitignore' + }, + { + name: AINDEX_FILE_NAMES.IDEA_PROJECT_XML, + required: false, + description: 'JetBrains IDE Project.xml' + }, + { + name: AINDEX_FILE_NAMES.IDEA_CODE_STYLE_CONFIG_XML, + required: false, + description: 'JetBrains IDE codeStyleConfig.xml' + }, + { + name: AINDEX_FILE_NAMES.VSCODE_SETTINGS, + required: false, + description: 'VS Code settings.json' + }, + { + name: AINDEX_FILE_NAMES.VSCODE_EXTENSIONS, + required: false, + description: 'VS Code extensions.json' + }, + { + name: AINDEX_FILE_NAMES.ZED_SETTINGS, + required: false, + description: 'Zed settings.json' + } + ], + ignoreFiles: [ + { + name: AINDEX_FILE_NAMES.QODER_IGNORE, + required: false, + description: 'Qoder ignore file' + }, + { + name: AINDEX_FILE_NAMES.CURSOR_IGNORE, + required: false, + description: 'Cursor ignore file' + }, + { + name: AINDEX_FILE_NAMES.WARP_INDEX_IGNORE, + required: false, + description: 'Warp index ignore file' + }, + { + name: AINDEX_FILE_NAMES.AI_IGNORE, + required: false, + description: 'AI ignore file' + }, + { + name: AINDEX_FILE_NAMES.CODEIUM_IGNORE, + required: false, + description: 'Windsurf ignore file' + } + ] +} as const + +/** + * Type for directory names + */ +export type AindexDirName + = (typeof AINDEX_DIR_NAMES)[keyof typeof AINDEX_DIR_NAMES] + +/** + * Type for file names + */ +export type AindexFileName + = (typeof AINDEX_FILE_NAMES)[keyof typeof AINDEX_FILE_NAMES] + +/** + * Type for relative paths + */ +export type AindexRelativePath + = (typeof AINDEX_RELATIVE_PATHS)[keyof typeof AINDEX_RELATIVE_PATHS] diff --git a/sdk/src/plugins/plugin-core/ConfigTypes.schema.ts b/sdk/src/plugins/plugin-core/ConfigTypes.schema.ts new file mode 100644 index 00000000..3bf047ad --- /dev/null +++ b/sdk/src/plugins/plugin-core/ConfigTypes.schema.ts @@ -0,0 +1,188 @@ +import {z} from 'zod/v3' +import { + AINDEX_CONFIG_PAIR_DEFAULTS, + AINDEX_CONFIG_PAIR_KEYS, + AINDEX_DEFAULT_DIR_NAME +} from './AindexConfigDefaults' + +/** + * Zod schema for a source/dist path pair. + * Both paths are relative to the aindex project root. + */ +export const ZAindexDirPair = z.object({src: z.string(), dist: z.string()}) + +const AINDEX_CONFIG_PAIR_SCHEMAS = Object.fromEntries( + AINDEX_CONFIG_PAIR_KEYS.map(key => [ + key, + key === 'softwares' + ? ZAindexDirPair.default(AINDEX_CONFIG_PAIR_DEFAULTS[key]) + : ZAindexDirPair + ]) +) as Record< + (typeof AINDEX_CONFIG_PAIR_KEYS)[number], + typeof ZAindexDirPair | z.ZodDefault +> + +/** + * Zod schema for the aindex configuration. + * All paths are relative to /. + */ +export const ZAindexConfig = z.object({ + dir: z.string().default(AINDEX_DEFAULT_DIR_NAME), + ...AINDEX_CONFIG_PAIR_SCHEMAS +}) + +/** + * Zod schema for per-plugin command series override options. + */ +export const ZCommandSeriesPluginOverride = z.object({ + includeSeriesPrefix: z.boolean().optional(), + seriesSeparator: z.string().optional() +}) + +/** + * Zod schema for command series configuration options. + */ +export const ZCommandSeriesOptions = z.object({ + includeSeriesPrefix: z.boolean().optional(), + pluginOverrides: z.record(z.string(), ZCommandSeriesPluginOverride).optional() +}) + +/** + * Zod schema for output scope value. + */ +export const ZOutputScope = z.enum(['project', 'global']) + +/** + * Zod schema for selecting one or more scopes. + */ +export const ZOutputScopeSelection = z.union([ZOutputScope, z.array(ZOutputScope).min(1)]) + +/** + * Zod schema for per-plugin topic scope overrides. + */ +export const ZPluginOutputScopeTopics = z.object({ + prompt: ZOutputScopeSelection.optional(), + rules: ZOutputScopeSelection.optional(), + commands: ZOutputScopeSelection.optional(), + subagents: ZOutputScopeSelection.optional(), + skills: ZOutputScopeSelection.optional(), + mcp: ZOutputScopeSelection.optional() +}) + +/** + * Zod schema for output scope override configuration. + */ +export const ZOutputScopeOptions = z.object({plugins: z.record(z.string(), ZPluginOutputScopeTopics).optional()}) + +/** + * Zod schema for shared front matter formatting options. + */ +export const ZFrontMatterOptions = z.object({blankLineAfter: z.boolean().optional()}) + +export const ZProtectionMode = z.enum(['direct', 'recursive']) +export const ZProtectionRuleMatcher = z.enum(['path', 'glob']) + +export const ZCleanupProtectionRule = z.object({ + path: z.string(), + protectionMode: ZProtectionMode, + matcher: ZProtectionRuleMatcher.optional(), + reason: z.string().optional() +}) + +export const ZCleanupProtectionOptions = z.object({rules: z.array(ZCleanupProtectionRule).optional()}) +export const ZStringOrStringArray = z.union([z.string(), z.array(z.string()).min(1)]) +export const ZWindowsWsl2Options = z.object({ + instances: ZStringOrStringArray.optional() +}) +export const ZWindowsOptions = z.object({ + wsl2: ZWindowsWsl2Options.optional() +}) + +/** + * Zod schema for user profile information. + */ +export const ZUserProfile = z.object({ + name: z.string().optional(), + username: z.string().optional(), + gender: z.string().optional(), + birthday: z.string().optional() +}).catchall(z.unknown()) + +/** + * Zod schema for the user configuration file (.tnmsc.json). + */ +export const ZUserConfigFile = z.object({ + version: z.string().optional(), + workspaceDir: z.string().optional(), + aindex: ZAindexConfig.optional(), + logLevel: z.enum(['trace', 'debug', 'info', 'warn', 'error']).optional(), + commandSeriesOptions: ZCommandSeriesOptions.optional(), + outputScopes: ZOutputScopeOptions.optional(), + frontMatter: ZFrontMatterOptions.optional(), + cleanupProtection: ZCleanupProtectionOptions.optional(), + windows: ZWindowsOptions.optional(), + profile: ZUserProfile.optional() +}) + +/** + * Zod schema for MCP project config. + */ +export const ZMcpProjectConfig = z.object({names: z.array(z.string()).optional()}) + +/** + * Zod schema for per-type series filtering configuration. + */ +export const ZTypeSeriesConfig = z.object({ + includeSeries: z.array(z.string()).optional(), + subSeries: z.record(z.string(), z.array(z.string())).optional() +}) + +/** + * Zod schema for project config. + */ +export const ZProjectConfig = z.object({ + mcp: ZMcpProjectConfig.optional(), + includeSeries: z.array(z.string()).optional(), + subSeries: z.record(z.string(), z.array(z.string())).optional(), + rules: ZTypeSeriesConfig.optional(), + skills: ZTypeSeriesConfig.optional(), + subAgents: ZTypeSeriesConfig.optional(), + commands: ZTypeSeriesConfig.optional() +}) + +/** + * Zod schema for ConfigLoader options. + */ +export const ZConfigLoaderOptions = z.object({}) + +export type AindexDirPair = z.infer +export type AindexConfig = z.infer +export type CommandSeriesPluginOverride = z.infer +export type CommandSeriesOptions = z.infer +export type OutputScope = z.infer +export type OutputScopeSelection = z.infer +export type PluginOutputScopeTopics = z.infer +export type OutputScopeOptions = z.infer +export type FrontMatterOptions = z.infer +export type ProtectionMode = z.infer +export type ProtectionRuleMatcher = z.infer +export type CleanupProtectionRule = z.infer +export type CleanupProtectionOptions = z.infer +export type StringOrStringArray = z.infer +export type WindowsWsl2Options = z.infer +export type WindowsOptions = z.infer +export type UserConfigFile = z.infer +export type McpProjectConfig = z.infer +export type TypeSeriesConfig = z.infer +export type ProjectConfig = z.infer +export type ConfigLoaderOptions = z.infer + +/** + * Result of loading a config file. + */ +export interface ConfigLoadResult { + readonly config: UserConfigFile + readonly source: string | null + readonly found: boolean +} diff --git a/sdk/src/plugins/plugin-core/DistPromptGuards.test.ts b/sdk/src/plugins/plugin-core/DistPromptGuards.test.ts new file mode 100644 index 00000000..021a392d --- /dev/null +++ b/sdk/src/plugins/plugin-core/DistPromptGuards.test.ts @@ -0,0 +1,22 @@ +import {describe, expect, it} from 'vitest' +import {assertNoResidualModuleSyntax} from './DistPromptGuards' + +describe('dist prompt guards', () => { + it('allows ordinary markdown content', () => { + expect(() => assertNoResidualModuleSyntax('# Title\n\nBody text', '/tmp/demo.mdx')).not.toThrow() + }) + + it('rejects bare module syntax outside fenced code blocks', () => { + expect(() => assertNoResidualModuleSyntax('export default\n\n# Title', '/tmp/demo.mdx')).toThrow( + 'Compiled prompt still contains residual module syntax' + ) + }) + + it('ignores module syntax inside fenced code blocks', () => { + expect(() => assertNoResidualModuleSyntax([ + '```ts', + 'export default {name: "demo"}', + '```' + ].join('\n'), '/tmp/demo.mdx')).not.toThrow() + }) +}) diff --git a/sdk/src/plugins/plugin-core/DistPromptGuards.ts b/sdk/src/plugins/plugin-core/DistPromptGuards.ts new file mode 100644 index 00000000..f0f57239 --- /dev/null +++ b/sdk/src/plugins/plugin-core/DistPromptGuards.ts @@ -0,0 +1,68 @@ +export interface MissingCompiledPromptErrorOptions { + readonly kind: string + readonly name: string + readonly sourcePath?: string + readonly expectedDistPath: string +} + +export class MissingCompiledPromptError extends Error { + readonly kind: string + + readonly nameOfPrompt: string + + readonly sourcePath?: string + + readonly expectedDistPath: string + + constructor(options: MissingCompiledPromptErrorOptions) { + const {kind, name, sourcePath, expectedDistPath} = options + super([ + `Missing compiled dist prompt for ${kind} "${name}".`, + ...sourcePath != null ? [`source: ${sourcePath}`] : [], + `expected dist: ${expectedDistPath}` + ].join(' ')) + this.name = 'MissingCompiledPromptError' + this.kind = kind + this.nameOfPrompt = name + if (sourcePath != null) this.sourcePath = sourcePath + this.expectedDistPath = expectedDistPath + } +} + +export class ResidualModuleSyntaxError extends Error { + readonly filePath: string + + readonly lineNumber: number + + constructor(filePath: string, lineNumber: number, lineContent: string) { + super(`Compiled prompt still contains residual module syntax at ${filePath}:${lineNumber}: ${lineContent.trim()}`) + this.name = 'ResidualModuleSyntaxError' + this.filePath = filePath + this.lineNumber = lineNumber + } +} + +const CODE_FENCE_PATTERN = /^\s*(```|~~~)/u +const RESIDUAL_MODULE_SYNTAX_PATTERNS = [ + /^\s*export\s+default\b/u, + /^\s*export\s+const\b/u, + /^\s*import\b/u +] + +export function assertNoResidualModuleSyntax(content: string, filePath: string): void { + let activeFence: string | undefined + const lines = content.split(/\r?\n/u) + + for (const [index, line] of lines.entries()) { + const fenceMatch = CODE_FENCE_PATTERN.exec(line) + if (fenceMatch?.[1] != null) { + const marker = fenceMatch[1] + if (activeFence == null) activeFence = marker + else if (activeFence === marker) activeFence = void 0 + continue + } + + if (activeFence != null) continue + if (RESIDUAL_MODULE_SYNTAX_PATTERNS.some(pattern => pattern.test(line))) throw new ResidualModuleSyntaxError(filePath, index + 1, line) + } +} diff --git a/sdk/src/plugins/plugin-core/ExportMetadataTypes.ts b/sdk/src/plugins/plugin-core/ExportMetadataTypes.ts new file mode 100644 index 00000000..fb969539 --- /dev/null +++ b/sdk/src/plugins/plugin-core/ExportMetadataTypes.ts @@ -0,0 +1,278 @@ +/** + * Export metadata types for MDX files + * These interfaces define the expected structure of export statements in MDX files + * that are used as front matter metadata. + * + * @module ExportMetadataTypes + */ + +import type {CodingAgentTools, NamingCaseKind, RuleScope} from './enums' +import type {SeriName} from './PromptTypes' + +/** + * Base export metadata interface + * All export metadata types should extend this + */ +export interface BaseExportMetadata { + readonly namingCase?: NamingCaseKind +} + +export interface SkillExportMetadata extends BaseExportMetadata { + readonly name?: string + readonly description: string + readonly keywords?: readonly string[] + readonly enabled?: boolean + readonly displayName?: string + readonly author?: string + readonly version?: string + readonly allowTools?: readonly (CodingAgentTools | string)[] + readonly seriName?: SeriName + readonly scope?: RuleScope +} + +export interface CommandExportMetadata extends BaseExportMetadata { + readonly description?: string + readonly argumentHint?: string + readonly allowTools?: readonly (CodingAgentTools | string)[] + readonly globalOnly?: boolean + readonly seriName?: SeriName + readonly scope?: RuleScope +} + +export interface RuleExportMetadata extends BaseExportMetadata { + readonly globs: readonly string[] + readonly description: string + readonly scope?: RuleScope + readonly seriName?: SeriName +} + +export interface SubAgentExportMetadata extends BaseExportMetadata { + readonly description: string + readonly role?: string + readonly model?: string + readonly color?: string + readonly argumentHint?: string + readonly allowTools?: readonly (CodingAgentTools | string)[] + readonly seriName?: SeriName + readonly scope?: RuleScope +} + +/** + * Metadata validation result + */ +export interface MetadataValidationResult { + readonly valid: boolean + readonly errors: readonly string[] + readonly warnings: readonly string[] +} + +/** + * Options for metadata validation + */ +export interface ValidateMetadataOptions { + readonly requiredFields: readonly (keyof T)[] + readonly optionalDefaults?: Partial + readonly filePath?: string | undefined +} + +function validateSupportedScope( + scope: unknown, + filePath?: string +): MetadataValidationResult { + const prefix = filePath != null ? ` in ${filePath}` : '' + + if (scope == null) { + return { + valid: true, + errors: [], + warnings: [] + } + } + + if (scope === 'project' || scope === 'global') { + return { + valid: true, + errors: [], + warnings: [] + } + } + + return { + valid: false, + errors: [`Field "scope" must be "project" or "global"${prefix}`], + warnings: [] + } +} + +export function validateExportMetadata( + metadata: Record, + options: ValidateMetadataOptions +): MetadataValidationResult { + const {requiredFields, optionalDefaults, filePath} = options + const errors: string[] = [] + const warnings: string[] = [] + + for (const field of requiredFields) { // Check required fields + const fieldName = String(field) + if (!(fieldName in metadata) || metadata[fieldName] == null) { + const errorMsg = filePath != null + ? `Missing required field "${fieldName}" in ${filePath}` + : `Missing required field "${fieldName}"` + errors.push(errorMsg) + } + } + + if (optionalDefaults != null) { // Check optional fields and record warnings for defaults + for (const [key, defaultValue] of Object.entries(optionalDefaults)) { + if (!(key in metadata) || metadata[key] == null) { + const warningMsg = filePath != null + ? `Using default value for optional field "${key}": ${JSON.stringify(defaultValue)} in ${filePath}` + : `Using default value for optional field "${key}": ${JSON.stringify(defaultValue)}` + warnings.push(warningMsg) + } + } + } + + return { + valid: errors.length === 0, + errors, + warnings + } +} + +/** + * Validate skill export metadata + * + * @param metadata - The metadata object to validate + * @param filePath - Optional file path for error messages + * @returns Validation result + */ +export function validateSkillMetadata( + metadata: Record, + filePath?: string +): MetadataValidationResult { + const prefix = filePath != null ? ` in ${filePath}` : '' + const errors: string[] = [] + const warnings: string[] = [] + + if (!('description' in metadata) || metadata['description'] == null) { // Check description field - must exist and not be empty + errors.push(`Missing required field "description"${prefix}`) + } else if (typeof metadata['description'] !== 'string' || metadata['description'].trim().length === 0) { + errors.push(`Required field "description" cannot be empty${prefix}`) + } + + if (metadata['enabled'] == null) { // Optional fields with defaults + warnings.push(`Using default value for optional field "enabled": true${prefix}`) + } + if (metadata['keywords'] == null) warnings.push(`Using default value for optional field "keywords": []${prefix}`) + + const scopeValidation = validateSupportedScope(metadata['scope'], filePath) + errors.push(...scopeValidation.errors) + + return { + valid: errors.length === 0, + errors, + warnings + } +} + +/** + * Validate fast command export metadata + * + * @param metadata - The metadata object to validate + * @param filePath - Optional file path for error messages + * @returns Validation result + */ +export function validateCommandMetadata( + metadata: Record, + filePath?: string +): MetadataValidationResult { + const result = validateExportMetadata(metadata, { // description is optional (can come from YAML or be omitted) // Command has no required fields from export metadata + requiredFields: [], + optionalDefaults: {}, + filePath + }) + const scopeValidation = validateSupportedScope(metadata['scope'], filePath) + + return { + valid: result.valid && scopeValidation.valid, + errors: [...result.errors, ...scopeValidation.errors], + warnings: result.warnings + } +} + +/** + * Validate sub-agent export metadata + * + * @param metadata - The metadata object to validate + * @param filePath - Optional file path for error messages + * @returns Validation result + */ +export function validateSubAgentMetadata( + metadata: Record, + filePath?: string +): MetadataValidationResult { + const result = validateExportMetadata(metadata, { + requiredFields: ['description'], + optionalDefaults: {}, + filePath + }) + const scopeValidation = validateSupportedScope(metadata['scope'], filePath) + + return { + valid: result.valid && scopeValidation.valid, + errors: [...result.errors, ...scopeValidation.errors], + warnings: result.warnings + } +} + +/** + * Validate rule export metadata + * + * @param metadata - The metadata object to validate + * @param filePath - Optional file path for error messages + * @returns Validation result + */ +export function validateRuleMetadata( + metadata: Record, + filePath?: string +): MetadataValidationResult { + const errors: string[] = [] + const warnings: string[] = [] + const prefix = filePath != null ? ` in ${filePath}` : '' + + if (!Array.isArray(metadata['globs']) || metadata['globs'].length === 0) errors.push(`Missing or empty required field "globs"${prefix}`) + else if (!metadata['globs'].every((g: unknown) => typeof g === 'string')) errors.push(`Field "globs" must be an array of strings${prefix}`) + + if (typeof metadata['description'] !== 'string' || metadata['description'].length === 0) errors.push(`Missing or empty required field "description"${prefix}`) + + const {scope, seriName} = metadata + const scopeValidation = validateSupportedScope(scope, filePath) + errors.push(...scopeValidation.errors) + + if (scope == null) warnings.push(`Using default value for optional field "scope": "project"${prefix}`) + + if (seriName != null && typeof seriName !== 'string' && !Array.isArray(seriName)) errors.push(`Field "seriName" must be a string or string array${prefix}`) + + return {valid: errors.length === 0, errors, warnings} +} + +/** + * Apply default values to metadata + * + * @param metadata - The metadata object + * @param defaults - Default values to apply + * @returns Metadata with defaults applied + */ +export function applyMetadataDefaults( + metadata: Record, + defaults: Partial +): T { + const result = {...metadata} + + for (const [key, defaultValue] of Object.entries(defaults)) { + if (!(key in result) || result[key] == null) result[key] = defaultValue + } + + return result as T +} diff --git a/sdk/src/plugins/plugin-core/GlobalScopeCollector.ts b/sdk/src/plugins/plugin-core/GlobalScopeCollector.ts new file mode 100644 index 00000000..2e6157b8 --- /dev/null +++ b/sdk/src/plugins/plugin-core/GlobalScopeCollector.ts @@ -0,0 +1,231 @@ +import type {EvaluationScope} from '@truenine/md-compiler' +import type {EnvironmentContext, MdComponent, MdxGlobalScope, OsInfo, ToolReferences, UserProfile} from '@truenine/md-compiler/globals' // Collects and manages global scope variables for MDX expression evaluation. // src/scope/GlobalScopeCollector.ts +import type {UserConfigFile} from './types' +import * as os from 'node:os' +import process from 'node:process' +import {OsKind, ShellKind, ToolPresets} from '@truenine/md-compiler/globals' +import {getEffectiveHomeDir} from '@/runtime-environment' + +/** + * Tool preset names supported by GlobalScopeCollector + */ +export type ToolPresetName = keyof typeof ToolPresets + +/** + * Options for GlobalScopeCollector + */ +export interface GlobalScopeCollectorOptions { + /** User configuration file */ + readonly userConfig?: UserConfigFile | undefined + /** Tool preset to use (default: 'default') */ + readonly toolPreset?: ToolPresetName | undefined +} + +/** + * Collects global scope variables from system, environment, and user configuration. + * The collected scope is available in MDX templates via expressions like {os.platform}, {env.NODE_ENV}, etc. + */ +export class GlobalScopeCollector { + private readonly userConfig: UserConfigFile | undefined + private readonly toolPreset: ToolPresetName + + constructor(options: GlobalScopeCollectorOptions = {}) { + this.userConfig = options.userConfig + this.toolPreset = options.toolPreset ?? 'default' + } + + collect(): MdxGlobalScope { + return { + os: this.collectOsInfo(), + env: this.collectEnvContext(), + profile: this.collectProfile(), + tool: this.collectToolReferences(), + Md: this.createMdComponent() + } + } + + private collectOsInfo(): OsInfo { + const platform = os.platform() + return { + platform, + arch: os.arch(), + hostname: os.hostname(), + homedir: getEffectiveHomeDir(), + tmpdir: os.tmpdir(), + type: os.type(), + release: os.release(), + shellKind: this.detectShellKind(), + kind: this.detectOsKind(platform) + } + } + + private detectOsKind(platform: string): OsKind { + switch (platform) { + case 'win32': return OsKind.Win + case 'darwin': return OsKind.Mac + case 'linux': + case 'freebsd': + case 'openbsd': + case 'sunos': + case 'aix': return OsKind.Linux + default: return OsKind.Unknown + } + } + + private detectShellKind(): ShellKind { + const shell = process.env['SHELL'] ?? process.env['ComSpec'] ?? '' + const s = shell.toLowerCase() + + if (s.includes('bash')) return ShellKind.Bash + if (s.includes('zsh')) return ShellKind.Zsh + if (s.includes('fish')) return ShellKind.Fish + if (s.includes('pwsh')) return ShellKind.Pwsh + if (s.includes('powershell')) return ShellKind.PowerShell + if (s.includes('cmd')) return ShellKind.Cmd + if (s.endsWith('/sh')) return ShellKind.Sh + + return ShellKind.Unknown + } + + private collectEnvContext(): EnvironmentContext { + return {...process.env} + } + + private collectProfile(): UserProfile { + if (this.userConfig?.profile != null) return this.userConfig.profile as UserProfile + return {} + } + + private collectToolReferences(): ToolReferences { + const defaults: ToolReferences = {...ToolPresets.default} + if (this.toolPreset === 'claudeCode') return {...defaults, ...ToolPresets.claudeCode} + if (this.toolPreset === 'kiro') return {...defaults, ...ToolPresets.kiro} + return defaults + } + + private createMdComponent(): MdComponent { + const mdComponent = ((props: {when?: boolean, children?: unknown}) => { + if (props.when === false) return null + return props.children + }) as MdComponent + + mdComponent.Line = (props: {when?: boolean, children?: unknown}) => { + if (props.when === false) return null + return props.children + } + + return mdComponent + } +} + +/** + * Represents a single scope registration + */ +export interface ScopeRegistration { + readonly namespace: string + readonly values: Record + readonly priority: number +} + +/** + * Priority levels for scope sources. + * Higher values take precedence over lower values during merge. + */ +export enum ScopePriority { + /** System default values (os, default tool) */ + SystemDefault = 0, + /** Values from configuration file (profile, custom tool) */ + UserConfig = 10, + /** Values registered by plugins */ + PluginRegistered = 20, + /** Values passed at MDX compile time */ + CompileTime = 30 +} + +/** + * Registry for managing and merging scopes from multiple sources. + * Handles priority-based resolution when the same key exists in multiple sources. + */ +export class ScopeRegistry { + private readonly registrations: ScopeRegistration[] = [] + private globalScope: MdxGlobalScope | null = null + + setGlobalScope(scope: MdxGlobalScope): void { + this.globalScope = scope + } + + getGlobalScope(): MdxGlobalScope | null { + return this.globalScope + } + + register( + namespace: string, + values: Record, + priority: ScopePriority = ScopePriority.PluginRegistered + ): void { + this.registrations.push({namespace, values, priority}) + } + + getRegistrations(): readonly ScopeRegistration[] { + return this.registrations + } + + merge(compileTimeScope?: EvaluationScope): EvaluationScope { + const result: EvaluationScope = {} + + if (this.globalScope != null) { // 1. First add global scope (lowest priority) + result['os'] = {...this.globalScope.os} + result['env'] = {...this.globalScope.env} + result['profile'] = {...this.globalScope.profile} + result['tool'] = {...this.globalScope.tool} + } + + const sorted = [...this.registrations].sort((a, b) => a.priority - b.priority) // 2. Sort by priority and merge registered scopes + for (const reg of sorted) result[reg.namespace] = this.deepMerge(result[reg.namespace] as Record | undefined, reg.values) + + if (compileTimeScope != null) { // 3. Finally merge compile-time scope (highest priority) + for (const [key, value] of Object.entries(compileTimeScope)) { + result[key] = typeof value === 'object' && value !== null && !Array.isArray(value) + ? this.deepMerge(result[key] as Record | undefined, value as Record) + : value + } + } + + return result + } + + private deepMerge( + target: Record | undefined, + source: Record + ): Record { + if (target == null) return {...source} + + const result = {...target} + for (const [key, value] of Object.entries(source)) { + result[key] = typeof value === 'object' + && value !== null + && !Array.isArray(value) + && typeof result[key] === 'object' + && result[key] !== null + && !Array.isArray(result[key]) + ? this.deepMerge(result[key] as Record, value as Record) + : value + } + return result + } + + resolve(expression: string): string { + const scope = this.merge() + return expression.replaceAll(/\$\{([^}]+)\}/g, (_, key: string) => { + const parts = key.split('.') + let value: unknown = scope + for (const part of parts) value = (value as Record)?.[part] + return value != null ? String(value) : `\${${key}}` + }) + } + + clear(): void { + this.registrations.length = 0 + this.globalScope = null + } +} diff --git a/sdk/src/plugins/plugin-core/InputTypes.ts b/sdk/src/plugins/plugin-core/InputTypes.ts new file mode 100644 index 00000000..e3785c2b --- /dev/null +++ b/sdk/src/plugins/plugin-core/InputTypes.ts @@ -0,0 +1,418 @@ +import type {AindexProjectSeriesName} from './AindexTypes' +import type {ProjectConfig} from './ConfigTypes.schema' +import type {FilePathKind, IDEKind, PromptKind, RuleScope} from './enums' +import type { + CommandYAMLFrontMatter, + FileContent, + GlobalMemoryPrompt, + Path, + ProjectChildrenMemoryPrompt, + ProjectRootMemoryPrompt, + Prompt, + RelativePath, + RuleYAMLFrontMatter, + SeriName, + SkillYAMLFrontMatter, + SubAgentYAMLFrontMatter +} from './PromptTypes' + +export interface Project { + readonly name?: string + readonly dirFromWorkspacePath?: RelativePath + readonly rootMemoryPrompt?: ProjectRootMemoryPrompt + readonly childMemoryPrompts?: readonly ProjectChildrenMemoryPrompt[] + readonly isPromptSourceProject?: boolean + readonly isWorkspaceRootProject?: boolean + readonly projectConfig?: ProjectConfig + readonly promptSeries?: AindexProjectSeriesName +} + +export interface Workspace { + readonly directory: Path + readonly projects: Project[] +} + +/** + * IDE configuration file + */ +export interface ProjectIDEConfigFile< + I extends IDEKind = IDEKind.Original +> extends FileContent { + readonly type: I +} + +/** + * AI Agent ignore configuration file + */ +export interface AIAgentIgnoreConfigFile { + readonly fileName: string + readonly content: string + readonly sourcePath?: string +} + +/** + * Shared context fields across input aggregation and output execution. + */ +interface CollectedContextData { + readonly workspace: Workspace + + /** Flat prompt projections used by current output plugins */ + readonly skills?: readonly SkillPrompt[] + readonly commands?: readonly CommandPrompt[] + readonly subAgents?: readonly SubAgentPrompt[] + readonly rules?: readonly RulePrompt[] + readonly readmePrompts?: readonly ReadmePrompt[] + readonly globalMemory?: GlobalMemoryPrompt + + /** Other non-prompt fields */ + readonly vscodeConfigFiles?: readonly ProjectIDEConfigFile[] + readonly zedConfigFiles?: readonly ProjectIDEConfigFile[] + readonly jetbrainsConfigFiles?: readonly ProjectIDEConfigFile[] + readonly editorConfigFiles?: readonly ProjectIDEConfigFile[] + readonly aiAgentIgnoreConfigFiles?: readonly AIAgentIgnoreConfigFile[] + readonly globalGitIgnore?: string + readonly shadowGitExclude?: string + readonly aindexDir?: string +} + +/** + * Input-side collected context. + * Built incrementally by input plugins through dependency-aware merging. + */ +export interface InputCollectedContext extends CollectedContextData {} + +/** + * Output-side collected context. + * Produced once from input context and consumed by output plugins only. + */ +export interface OutputCollectedContext extends CollectedContextData {} + +/** + * Convert input context to output context boundary object. + * This keeps input and output stages decoupled while preserving data shape. + */ +export function toOutputCollectedContext( + input: InputCollectedContext +): OutputCollectedContext { + return { + workspace: { + directory: input.workspace.directory, + projects: [...input.workspace.projects] + }, + ...input.skills != null && {skills: [...input.skills]}, + ...input.commands != null && {commands: [...input.commands]}, + ...input.subAgents != null && {subAgents: [...input.subAgents]}, + ...input.rules != null && {rules: [...input.rules]}, + ...input.readmePrompts != null && { + readmePrompts: [...input.readmePrompts] + }, + ...input.globalMemory != null && {globalMemory: input.globalMemory}, + ...input.vscodeConfigFiles != null && { + vscodeConfigFiles: [...input.vscodeConfigFiles] + }, + ...input.zedConfigFiles != null && { + zedConfigFiles: [...input.zedConfigFiles] + }, + ...input.jetbrainsConfigFiles != null && { + jetbrainsConfigFiles: [...input.jetbrainsConfigFiles] + }, + ...input.editorConfigFiles != null && { + editorConfigFiles: [...input.editorConfigFiles] + }, + ...input.aiAgentIgnoreConfigFiles != null && { + aiAgentIgnoreConfigFiles: [...input.aiAgentIgnoreConfigFiles] + }, + ...input.globalGitIgnore != null && { + globalGitIgnore: input.globalGitIgnore + }, + ...input.shadowGitExclude != null && { + shadowGitExclude: input.shadowGitExclude + }, + ...input.aindexDir != null && {aindexDir: input.aindexDir} + } +} + +/** + * Rule prompt with glob patterns for file-scoped rule application + */ +export interface RulePrompt extends Prompt< + PromptKind.Rule, + RuleYAMLFrontMatter, + FilePathKind.Relative, + RelativePath, + string +> { + readonly type: PromptKind.Rule + readonly prefix: string + readonly ruleName: string + readonly globs: readonly string[] + readonly scope: RuleScope + readonly seriName?: SeriName + readonly rawMdxContent?: string +} + +/** + * Command prompt + */ +export interface CommandPrompt extends Prompt< + PromptKind.Command, + CommandYAMLFrontMatter, + FilePathKind.Relative, + RelativePath, + string +> { + readonly type: PromptKind.Command + readonly globalOnly?: true + readonly commandPrefix?: string + readonly commandName: string + readonly seriName?: SeriName + readonly rawMdxContent?: string +} + +/** + * Sub-agent prompt + */ +export interface SubAgentPrompt extends Prompt< + PromptKind.SubAgent, + SubAgentYAMLFrontMatter, + FilePathKind.Relative, + RelativePath, + string +> { + readonly type: PromptKind.SubAgent + readonly agentPrefix?: string + readonly agentName: string + readonly canonicalName: string + readonly seriName?: SeriName + readonly rawMdxContent?: string +} + +/** + * Skill child document (.md files in skill directory or any subdirectory) + * Excludes skill.md which is the main skill file + */ +export interface SkillChildDoc extends Prompt { + readonly type: PromptKind.SkillChildDoc + readonly relativePath: string +} + +/** + * Resource content encoding type + */ +export type SkillResourceEncoding = 'text' | 'base64' + +/** + * Skill resource file for AI on-demand access + * Any non-.md file in skill directory or subdirectories + * + * Supports: + * - Code files: .kt, .java, .py, .ts, .js, .go, .rs, .c, .cpp, etc. + * - Data files: .sql, .json, .xml, .yaml, .csv, etc. + * - Documents: .txt, .rtf, .docx, .pdf, etc. + * - Config files: .ini, .conf, .properties, etc. + * - Scripts: .sh, .bash, .ps1, .bat, etc. + * - Images: .png, .jpg, .gif, .svg, .webp, etc. + * - Binary files: .exe, .dll, .wasm, etc. + */ +export interface SkillResource { + readonly type: PromptKind.SkillResource + readonly extension: string + readonly fileName: string + readonly relativePath: string + readonly sourcePath?: string + readonly content: string + readonly encoding: SkillResourceEncoding + readonly length: number + readonly mimeType?: string +} + +/** + * MCP server configuration entry + */ +export interface McpServerConfig { + readonly command: string + readonly args?: readonly string[] + readonly env?: Readonly> + readonly disabled?: boolean + readonly autoApprove?: readonly string[] +} + +/** + * Skill MCP configuration (mcp.json) + * - Kiro: supports per-power MCP configuration natively + * - Others: may support lazy loading in the future + */ +export interface SkillMcpConfig { + readonly type: PromptKind.SkillMcpConfig + readonly mcpServers: Readonly> + readonly rawContent: string +} + +export interface SkillPrompt extends Prompt< + PromptKind.Skill, + SkillYAMLFrontMatter +> { + readonly type: PromptKind.Skill + readonly dir: RelativePath + readonly skillName: string + readonly yamlFrontMatter: SkillYAMLFrontMatter + readonly mcpConfig?: SkillMcpConfig + readonly childDocs?: SkillChildDoc[] + readonly resources?: SkillResource[] + readonly seriName?: SeriName +} + +/** + * Readme-family source file kind + * + * - Readme: rdm.mdx → README.md + * - CodeOfConduct: coc.mdx → CODE_OF_CONDUCT.md + * - Security: security.mdx → SECURITY.md + */ +export type ReadmeFileKind = 'Readme' | 'CodeOfConduct' | 'Security' + +/** + * Mapping from ReadmeFileKind to source/output file names + */ +export const README_FILE_KIND_MAP: Readonly< + Record +> = { + Readme: {src: 'rdm.mdx', out: 'README.md'}, + CodeOfConduct: {src: 'coc.mdx', out: 'CODE_OF_CONDUCT.md'}, + Security: {src: 'security.mdx', out: 'SECURITY.md'} +} + +/** + * README-family prompt data structure (README.md, CODE_OF_CONDUCT.md, SECURITY.md) + */ +export interface ReadmePrompt extends Prompt { + readonly type: PromptKind.Readme + readonly projectName: string + readonly targetDir: RelativePath + readonly isRoot: boolean + readonly fileKind: ReadmeFileKind +} + +/** + * Supported locale codes + */ +export type Locale = 'zh' | 'en' + +export type LocalizedFileExtension = string | readonly string[] + +/** + * Localized content wrapper for a single locale + * Contains both compiled content and raw MDX source + */ +export interface LocalizedContent { + /** Compiled/processed content */ + readonly content: string + + /** Original MDX source (before compilation) */ + readonly rawMdx?: string + + /** Extracted front matter */ + readonly frontMatter?: Record + + /** File last modified timestamp */ + readonly lastModified: Date + + /** Full prompt object (optional, for extended access) */ + readonly prompt?: T + + /** Absolute file path */ + readonly filePath: string +} + +/** + * Source content container for all locales + */ +export interface LocalizedSource { + /** Default source content (.src.mdx) */ + readonly zh?: LocalizedContent + + /** English content (.mdx) */ + readonly en?: LocalizedContent + + /** Default locale content (typically zh) */ + readonly default: LocalizedContent + + /** Which locale is the default */ + readonly defaultLocale: Locale +} + +/** Universal localized prompt wrapper */ +export interface LocalizedPrompt< + T extends Prompt = Prompt, + K extends PromptKind = PromptKind +> { + readonly name: string // Prompt identifier name + readonly type: K // Prompt type kind + readonly src?: LocalizedSource // Source files content (src directory, optional when dist-only) + readonly dist?: LocalizedContent // Compiled/dist content (dist directory, optional) + + /** Metadata flags */ + readonly metadata: { + readonly hasDist: boolean // Whether dist content exists + readonly hasMultipleLocales: boolean // Whether multiple locales exist in src + readonly isDirectoryStructure: boolean // Whether this is a directory-based prompt (like skills) + + /** Available child items (for directory structures) */ + readonly children?: string[] + } + + /** File paths for all variants */ + readonly paths: { + readonly zh?: string + readonly en?: string + readonly dist?: string + } +} + +/** + * Options for reading localized prompts from different structures + */ +export interface LocalizedReadOptions { + /** File extensions for each locale */ + readonly localeExtensions: { + readonly zh: LocalizedFileExtension + readonly en: LocalizedFileExtension + } + + /** Entry file name (without extension, e.g., 'skill' for skills) */ + readonly entryFileName?: string + + /** Whether source contents should be hydrated and compiled in addition to dist */ + readonly hydrateSourceContents?: boolean + + /** Create prompt from content */ + readonly createPrompt: ( + content: string, + locale: Locale, + name: string, + metadata?: Record + ) => T | Promise + + /** Prompt kind */ + readonly kind: K + + /** Whether this is a directory-based structure */ + readonly isDirectoryStructure: boolean +} + +/** + * Result of reading a directory structure (like skills) + */ +export interface DirectoryReadResult { + readonly prompts: LocalizedPrompt[] + readonly errors: ReadError[] +} + +/** + * Error during reading + */ +export interface ReadError { + readonly path: string + readonly error: Error + readonly phase: 'scan' | 'read' | 'compile' +} diff --git a/sdk/src/plugins/plugin-core/LocalizedPromptReader.ts b/sdk/src/plugins/plugin-core/LocalizedPromptReader.ts new file mode 100644 index 00000000..6765aa08 --- /dev/null +++ b/sdk/src/plugins/plugin-core/LocalizedPromptReader.ts @@ -0,0 +1,736 @@ +import type {MdxGlobalScope} from '@truenine/md-compiler/globals' +import type {PromptCompilerDiagnosticContext} from './PromptCompilerDiagnostics' +import type { + DirectoryReadResult, + ILogger, + Locale, + LocalizedContent, + LocalizedFileExtension, + LocalizedPrompt, + LocalizedReadOptions, + LoggerDiagnosticInput, + Prompt, + PromptKind, + ReadError +} from './types' +import { + buildDiagnostic, + buildFileOperationDiagnostic, + buildPromptCompilerDiagnostic, + diagnosticLines +} from '@/diagnostics' +import { + assertNoResidualModuleSyntax, + MissingCompiledPromptError, + ResidualModuleSyntaxError +} from './DistPromptGuards' +import {readPromptArtifact} from './PromptArtifactCache' +import { + formatPromptCompilerDiagnostic, + resolveSourcePathForDistFile +} from './PromptCompilerDiagnostics' + +function shouldFailFast(error: unknown): boolean { + return error instanceof MissingCompiledPromptError || error instanceof ResidualModuleSyntaxError +} + +interface ReaderDiagnosticContext { + readonly promptKind: string + readonly logicalName: string + readonly entryDistPath: string + readonly srcPath?: string +} + +/** + * Universal reader for localized prompts + * Handles reading src (multiple locales) and dist (compiled) content + * Supports directory structures (skills) and flat files (commands, subAgents) + * + * Dist is the only prompt source that may flow into final outputs. + * Source files are read only for discovery, locale metadata, and validation. + */ +export class LocalizedPromptReader { + constructor( + private fs: typeof import('node:fs'), + private path: typeof import('node:path'), + private logger: ILogger, + private globalScope?: MdxGlobalScope + ) {} + + async readDirectoryStructure< + T extends Prompt, + K extends PromptKind + >( + srcDir: string, + distDir: string, + options: LocalizedReadOptions + ): Promise> { + const prompts: LocalizedPrompt[] = [] + const errors: ReadError[] = [] + + if (!this.exists(srcDir)) return {prompts, errors} + + try { + const entries = this.fs.readdirSync(srcDir, {withFileTypes: true}) + + for (const entry of entries) { + if (!entry.isDirectory()) continue + + const {name} = entry + const srcEntryDir = this.path.join(srcDir, name) + const distEntryDir = this.path.join(distDir, name) + + try { + const localized = await this.readEntry( + name, + srcEntryDir, + distEntryDir, + options, + true + ) + + if (localized) prompts.push(localized) + } catch (error) { + errors.push({ + path: srcEntryDir, + error: error as Error, + phase: 'read' + }) + this.logger.error(buildFileOperationDiagnostic({ + code: 'LOCALIZED_PROMPT_ENTRY_READ_FAILED', + title: 'Failed to read localized prompt entry', + operation: 'read', + targetKind: `${String(options.kind)} prompt entry`, + path: srcEntryDir, + error, + details: { + entryName: name, + promptKind: String(options.kind) + } + })) + if (shouldFailFast(error)) throw error + } + } + } catch (error) { + errors.push({ + path: srcDir, + error: error as Error, + phase: 'scan' + }) + this.logger.error(buildFileOperationDiagnostic({ + code: 'LOCALIZED_PROMPT_DIRECTORY_SCAN_FAILED', + title: 'Failed to scan localized prompt source directory', + operation: 'scan', + targetKind: `${String(options.kind)} prompt source directory`, + path: srcDir, + error, + details: { + promptKind: String(options.kind) + } + })) + if (shouldFailFast(error)) throw error + } + + return {prompts, errors} + } + + async readFlatFiles< + T extends Prompt, + K extends PromptKind + >( + srcDir: string, + distDir: string, + options: LocalizedReadOptions + ): Promise> { + const prompts: LocalizedPrompt[] = [] + const errors: ReadError[] = [] + + const srcExists = this.exists(srcDir) + const distExists = this.exists(distDir) + + this.logger.debug(`readFlatFiles: srcDir=${srcDir}, exists=${srcExists}`) + this.logger.debug(`readFlatFiles: distDir=${distDir}, exists=${distExists}`) + + if (!srcExists && !distExists) return {prompts, errors} + + const zhExtensions = this.normalizeExtensions(options.localeExtensions.zh) + const seenNames = new Set() + + const readPrompt = async (fullName: string, filePath: string): Promise => { + if (seenNames.has(fullName)) return + seenNames.add(fullName) + + try { + const localized = await this.readFlatEntry( + fullName, + srcDir, + distDir, + fullName, + options + ) + + if (localized) prompts.push(localized) + } catch (error) { + errors.push({ + path: filePath, + error: error as Error, + phase: 'read' + }) + this.logger.error(buildFileOperationDiagnostic({ + code: 'LOCALIZED_PROMPT_FILE_READ_FAILED', + title: 'Failed to read localized prompt file', + operation: 'read', + targetKind: `${String(options.kind)} prompt file`, + path: filePath, + error, + details: { + promptKind: String(options.kind), + logicalName: fullName + } + })) + if (shouldFailFast(error)) throw error + } + } + + const scanSourceDirectory = async (currentSrcDir: string, relativePath: string = ''): Promise => { + if (!this.exists(currentSrcDir)) return + + try { + const entries = this.fs.readdirSync(currentSrcDir, {withFileTypes: true}) + for (const entry of entries) { + const entryRelativePath = relativePath + ? this.path.join(relativePath, entry.name) + : entry.name + + if (entry.isDirectory()) { + await scanSourceDirectory(this.path.join(currentSrcDir, entry.name), entryRelativePath) + continue + } + + const matchedExtension = this.findMatchingExtension(entry.name, zhExtensions) + if (!entry.isFile() || matchedExtension == null) continue + + const baseName = entry.name.slice(0, -matchedExtension.length) + const fullName = relativePath + ? this.path.join(relativePath, baseName) + : baseName + + await readPrompt(fullName, this.path.join(currentSrcDir, entry.name)) + } + } catch (error) { + errors.push({ + path: currentSrcDir, + error: error as Error, + phase: 'scan' + }) + this.logger.error(buildFileOperationDiagnostic({ + code: 'LOCALIZED_SOURCE_DIRECTORY_SCAN_FAILED', + title: 'Failed to scan localized source directory', + operation: 'scan', + targetKind: `${String(options.kind)} source directory`, + path: currentSrcDir, + error, + details: { + promptKind: String(options.kind) + } + })) + if (shouldFailFast(error)) throw error + } + } + + const scanDistDirectory = async (currentDistDir: string, relativePath: string = ''): Promise => { + if (!this.exists(currentDistDir)) return + + try { + const entries = this.fs.readdirSync(currentDistDir, {withFileTypes: true}) + for (const entry of entries) { + const entryRelativePath = relativePath + ? this.path.join(relativePath, entry.name) + : entry.name + + if (entry.isDirectory()) { + await scanDistDirectory(this.path.join(currentDistDir, entry.name), entryRelativePath) + continue + } + + if (!entry.isFile() || !entry.name.endsWith('.mdx')) continue + + const baseName = entry.name.slice(0, -'.mdx'.length) + const fullName = relativePath + ? this.path.join(relativePath, baseName) + : baseName + + await readPrompt(fullName, this.path.join(currentDistDir, entry.name)) + } + } catch (error) { + errors.push({ + path: currentDistDir, + error: error as Error, + phase: 'scan' + }) + this.logger.error(buildFileOperationDiagnostic({ + code: 'LOCALIZED_DIST_DIRECTORY_SCAN_FAILED', + title: 'Failed to scan localized dist directory', + operation: 'scan', + targetKind: `${String(options.kind)} dist directory`, + path: currentDistDir, + error, + details: { + promptKind: String(options.kind) + } + })) + if (shouldFailFast(error)) throw error + } + } + + if (srcExists) await scanSourceDirectory(srcDir) + if (distExists) await scanDistDirectory(distDir) + + return {prompts, errors} + } + + async readSingleFile< + T extends Prompt, + K extends PromptKind + >( + srcBasePath: string, // Path without extension + distBasePath: string, + options: LocalizedReadOptions + ): Promise | null> { + const name = this.path.basename(srcBasePath) + + return this.readFlatEntry(name, this.path.dirname(srcBasePath), this.path.dirname(distBasePath), srcBasePath, options, true) + } + + private async readEntry< + T extends Prompt, + K extends PromptKind + >( + name: string, + srcEntryDir: string, + distEntryDir: string, + options: LocalizedReadOptions, + isDirectoryStructure = true + ): Promise | null> { + const {localeExtensions, entryFileName, createPrompt, kind} = options + const hydrateSourceContents = options.hydrateSourceContents ?? true + + const baseFileName = entryFileName ?? name + const zhExtensions = this.normalizeExtensions(localeExtensions.zh) + const enExtensions = this.normalizeExtensions(localeExtensions.en) + const srcZhPath = this.resolveLocalizedPath(srcEntryDir, baseFileName, zhExtensions) + const srcEnPath = this.resolveLocalizedPath(srcEntryDir, baseFileName, enExtensions) + const distPath = this.path.join(distEntryDir, `${baseFileName}.mdx`) + const hasSourceZh = this.exists(srcZhPath) + const hasSourceEn = this.exists(srcEnPath) + const existingSourcePath = hasSourceZh + ? srcZhPath + : hasSourceEn + ? srcEnPath + : void 0 + const diagnosticContext: ReaderDiagnosticContext = { + promptKind: String(kind), + logicalName: name, + entryDistPath: distPath, + ...existingSourcePath != null && {srcPath: existingSourcePath} + } + + const distContent = await this.readDistContent(distPath, createPrompt, name, diagnosticContext) + const zhContent = hasSourceZh && hydrateSourceContents + ? await this.readLocaleContent(srcZhPath, 'zh', createPrompt, name, String(kind)) + : null + const enContent = hasSourceEn && hydrateSourceContents + ? await this.readLocaleContent(srcEnPath, 'en', createPrompt, name, String(kind)) + : null + + const hasDist = distContent != null + const sourcePath = hasSourceZh ? srcZhPath : hasSourceEn ? srcEnPath : void 0 + + if (!hasDist && !hasSourceZh && !hasSourceEn) { + this.logger.warn(buildDiagnostic({ + code: 'LOCALIZED_PROMPT_ARTIFACTS_MISSING', + title: `Missing source and dist prompt artifacts for ${name}`, + rootCause: diagnosticLines( + `tnmsc could not find either the source prompt or the compiled dist prompt for "${name}".` + ), + exactFix: diagnosticLines( + 'Create the source prompt and rebuild the compiled dist prompt before retrying tnmsc.' + ), + details: { + promptKind: String(kind), + name, + srcZhPath, + srcEnPath, + distPath + } + })) + return null + } + + if (!hasDist) { + throw new MissingCompiledPromptError({ + kind: String(kind), + name, + ...sourcePath != null && {sourcePath}, + expectedDistPath: distPath + }) + } + + const src: LocalizedPrompt['src'] = hydrateSourceContents && zhContent != null + ? { + zh: zhContent, + ...enContent != null && {en: enContent}, + default: zhContent, + defaultLocale: 'zh' + } + : void 0 + + const children = isDirectoryStructure + ? this.scanChildren(distEntryDir, baseFileName, ['.mdx']) + : void 0 + + return { + name, + type: kind, + ...src != null && {src}, + ...hasDist && {dist: distContent}, + metadata: { + hasDist, + hasMultipleLocales: hasSourceEn, + isDirectoryStructure, + ...children && children.length > 0 && {children} + }, + paths: { + ...hasSourceZh && {zh: srcZhPath}, + ...hasSourceEn && {en: srcEnPath}, + ...hasDist && {dist: distPath} + } + } + } + + private async readFlatEntry< + T extends Prompt, + K extends PromptKind + >( + name: string, + srcDir: string, + distDir: string, + baseName: string, + options: LocalizedReadOptions, + isSingleFile = false + ): Promise | null> { + const {localeExtensions, createPrompt, kind} = options + const hydrateSourceContents = options.hydrateSourceContents ?? true + + const zhExtensions = this.normalizeExtensions(localeExtensions.zh) + const enExtensions = this.normalizeExtensions(localeExtensions.en) + const srcZhPath = this.resolveLocalizedPath('', baseName, zhExtensions) + const srcEnPath = this.resolveLocalizedPath('', baseName, enExtensions) + const distPath = this.path.join(distDir, `${name}.mdx`) + + const fullSrcZhPath = isSingleFile ? srcZhPath : this.path.join(srcDir, srcZhPath) + const fullSrcEnPath = isSingleFile ? srcEnPath : this.path.join(srcDir, srcEnPath) + const hasSourceZh = this.exists(fullSrcZhPath) + const hasSourceEn = this.exists(fullSrcEnPath) + const existingSourcePath = hasSourceZh + ? fullSrcZhPath + : hasSourceEn + ? fullSrcEnPath + : void 0 + const diagnosticContext: ReaderDiagnosticContext = { + promptKind: String(kind), + logicalName: name, + entryDistPath: distPath, + ...existingSourcePath != null && {srcPath: existingSourcePath} + } + + const distContent = await this.readDistContent(distPath, createPrompt, name, diagnosticContext) + const zhContent = hasSourceZh && hydrateSourceContents + ? await this.readLocaleContent(fullSrcZhPath, 'zh', createPrompt, name, String(kind)) + : null + const enContent = hasSourceEn && hydrateSourceContents + ? await this.readLocaleContent(fullSrcEnPath, 'en', createPrompt, name, String(kind)) + : null + + const hasDist = distContent != null + const sourcePath = hasSourceZh ? fullSrcZhPath : hasSourceEn ? fullSrcEnPath : void 0 + + if (!hasDist && !hasSourceZh && !hasSourceEn) { + this.logger.warn(buildDiagnostic({ + code: 'LOCALIZED_PROMPT_ARTIFACTS_MISSING', + title: `Missing source and dist prompt artifacts for ${name}`, + rootCause: diagnosticLines( + `tnmsc could not find either the source prompt or the compiled dist prompt for "${name}".` + ), + exactFix: diagnosticLines( + 'Create the source prompt and rebuild the compiled dist prompt before retrying tnmsc.' + ), + details: { + promptKind: String(kind), + name, + srcZhPath: fullSrcZhPath, + srcEnPath: fullSrcEnPath, + distPath + } + })) + return null + } + + if (!hasDist) { + throw new MissingCompiledPromptError({ + kind: String(kind), + name, + ...sourcePath != null && {sourcePath}, + expectedDistPath: distPath + }) + } + + const src: LocalizedPrompt['src'] = hydrateSourceContents && zhContent != null + ? { + zh: zhContent, + ...enContent != null && {en: enContent}, + default: zhContent, + defaultLocale: 'zh' + } + : void 0 + + return { + name, + type: kind, + ...src != null && {src}, + ...hasDist && {dist: distContent}, + metadata: { + hasDist, + hasMultipleLocales: hasSourceEn, + isDirectoryStructure: false + }, + paths: { + ...hasSourceZh && {zh: fullSrcZhPath}, + ...hasSourceEn && {en: fullSrcEnPath}, + ...hasDist && {dist: distPath} + } + } + } + + private async readLocaleContent( + filePath: string, + locale: Locale, + createPrompt: (content: string, locale: Locale, name: string, metadata?: Record) => T | Promise, + name: string, + promptKind: string + ): Promise | null> { + if (!this.exists(filePath)) return null + + try { + const artifact = await readPromptArtifact(filePath, { + mode: 'source', + globalScope: this.globalScope + }) + assertNoResidualModuleSyntax(artifact.content, filePath) + + const prompt = await createPrompt(artifact.content, locale, name, artifact.metadata) + + const result: LocalizedContent = { + content: artifact.content, + lastModified: artifact.lastModified, + filePath + } + + if (artifact.rawMdx.length > 0) { + Object.assign(result, {rawMdx: artifact.rawMdx}) + } + if (artifact.parsed.yamlFrontMatter != null) Object.assign(result, {frontMatter: artifact.parsed.yamlFrontMatter}) + if (prompt != null) Object.assign(result, {prompt}) + + return result + } catch (error) { + this.logger.error(buildPromptCompilerDiagnostic({ + code: 'LOCALIZED_SOURCE_PROMPT_READ_FAILED', + title: 'Failed to read localized source prompt', + diagnosticText: formatPromptCompilerDiagnostic(error, { + operation: 'Failed to read localized source prompt.', + promptKind, + logicalName: name, + distPath: filePath + }), + details: { + promptKind, + locale, + filePath + } + })) + throw error + } + } + + private async readDistContent( + filePath: string, + createPrompt: (content: string, locale: Locale, name: string, metadata?: Record) => T | Promise, + name: string, + diagnosticContext: ReaderDiagnosticContext + ): Promise | null> { + if (!this.exists(filePath)) return null + + try { + const artifact = await readPromptArtifact(filePath, { + mode: 'dist', + globalScope: this.globalScope + }) + assertNoResidualModuleSyntax(artifact.content, filePath) + + const prompt = await createPrompt( + artifact.content, + 'zh', + name, + artifact.metadata + ) + + const result: LocalizedContent = { + content: artifact.content, + lastModified: artifact.lastModified, + prompt, + filePath, + rawMdx: artifact.rawMdx + } + + if (artifact.parsed.yamlFrontMatter != null) Object.assign(result, {frontMatter: artifact.parsed.yamlFrontMatter}) + return result + } catch (error) { + this.logger.error(this.buildDistReadDiagnostic(error, filePath, diagnosticContext)) + throw error + } + } + + private buildDistReadDiagnostic( + error: unknown, + filePath: string, + context: ReaderDiagnosticContext + ): LoggerDiagnosticInput { + const mappedSourcePath = resolveSourcePathForDistFile(this.path, filePath, { + preferredSourcePath: filePath === context.entryDistPath ? context.srcPath : void 0, + distRootDir: this.path.dirname(context.entryDistPath), + srcRootDir: context.srcPath != null ? this.path.dirname(context.srcPath) : void 0 + }) + const formattedContext: PromptCompilerDiagnosticContext = { + operation: 'Failed to read dist content.', + promptKind: context.promptKind, + logicalName: context.logicalName, + entryDistPath: context.entryDistPath, + distPath: filePath, + srcPath: mappedSourcePath + } + return buildPromptCompilerDiagnostic({ + code: 'LOCALIZED_DIST_PROMPT_READ_FAILED', + title: 'Failed to read localized dist prompt', + diagnosticText: formatPromptCompilerDiagnostic(error, formattedContext), + details: { + promptKind: context.promptKind, + logicalName: context.logicalName, + filePath, + srcPath: mappedSourcePath + } + }) + } + + private scanChildren( + dir: string, + entryFileName: string, + zhExtensions: readonly string[] + ): string[] { + const children: string[] = [] + + if (!this.exists(dir)) return children + + const entryFullNames = new Set(zhExtensions.map(extension => `${entryFileName}${extension}`)) + + try { + const scanDir = (currentDir: string, relativePath: string): void => { + const entries = this.fs.readdirSync(currentDir, {withFileTypes: true}) + + for (const entry of entries) { + const fullPath = this.path.join(currentDir, entry.name) + const relativeFullPath = relativePath + ? this.path.join(relativePath, entry.name) + : entry.name + + if (entry.isDirectory()) scanDir(fullPath, relativeFullPath) + else { + const matchedExtension = this.findMatchingExtension(entry.name, zhExtensions) + if (matchedExtension == null || entryFullNames.has(entry.name)) continue + + const nameWithoutExt = entry.name.slice(0, -matchedExtension.length) // Child doc: relative path without extension + const relativeDir = this.path.dirname(relativeFullPath) + const childPath = relativeDir === '.' + ? nameWithoutExt + : this.path.join(relativeDir, nameWithoutExt) + children.push(childPath) + } + } + } + + scanDir(dir, '') + } catch (error) { + this.logger.warn(buildFileOperationDiagnostic({ + code: 'LOCALIZED_PROMPT_CHILD_SCAN_FAILED', + title: 'Failed to scan localized prompt child documents', + operation: 'scan', + targetKind: 'localized prompt child directory', + path: dir, + error + })) + } + + return children + } + + private exists(path: string): boolean { + try { + return this.fs.existsSync(path) + } catch { + return false + } + } + + private normalizeExtensions(extension: LocalizedFileExtension): readonly string[] { + return typeof extension === 'string' + ? [extension] + : extension + } + + private findMatchingExtension(fileName: string, extensions: readonly string[]): string | undefined { + return extensions.find(extension => fileName.endsWith(extension)) + } + + private resolveLocalizedPath(dir: string, baseFileName: string, extensions: readonly string[]): string { + const defaultPath = dir === '' + ? `${baseFileName}${extensions[0]}` + : this.path.join(dir, `${baseFileName}${extensions[0]}`) + + for (const extension of extensions) { + const candidate = dir === '' + ? `${baseFileName}${extension}` + : this.path.join(dir, `${baseFileName}${extension}`) + if (this.exists(candidate)) return candidate + } + + return defaultPath + } +} + +/** + * Factory function to create a LocalizedPromptReader + */ +export function createLocalizedPromptReader( + fs: typeof import('node:fs'), + path: typeof import('node:path'), + logger: ILogger, + globalScope?: MdxGlobalScope +): LocalizedPromptReader { + return new LocalizedPromptReader(fs, path, logger, globalScope) +} + +export { + type DirectoryReadResult, + type LocalizedReadOptions, + type ReadError +} from './types' diff --git a/sdk/src/plugins/plugin-core/McpConfigManager.ts b/sdk/src/plugins/plugin-core/McpConfigManager.ts new file mode 100644 index 00000000..6ed7e68c --- /dev/null +++ b/sdk/src/plugins/plugin-core/McpConfigManager.ts @@ -0,0 +1,251 @@ +import type {ILogger, McpServerConfig, SkillPrompt} from './types' +import * as path from 'node:path' +import {buildFileOperationDiagnostic} from '@/diagnostics' +import {resolveSkillName} from './PromptIdentity' + +/** + * MCP configuration format type + */ +export type McpConfigFormat = 'cursor' | 'opencode' + +/** + * MCP config entry for a single server + */ +export interface McpServerEntry { + readonly name: string + readonly config: McpServerConfig +} + +/** + * Transformed MCP server config for different output formats + */ +export interface TransformedMcpConfig { + [serverName: string]: Record +} + +/** + * Result of MCP config write operation + */ +export interface McpWriteResult { + readonly success: boolean + readonly path: string + readonly serverCount: number + readonly error?: Error + readonly skipped?: boolean +} + +/** + * MCP configuration transformer function type + */ +export type McpConfigTransformer = (config: McpServerConfig) => Record + +export function collectMcpServersFromSkills(skills: readonly SkillPrompt[], logger?: ILogger): Map { + const merged = new Map() + const serverCountsBySkill = new Map() + + for (const skill of skills) { + if (skill.mcpConfig == null) continue + + const skillName = resolveSkillName(skill) + let count = 0 + for (const [name, config] of Object.entries(skill.mcpConfig.mcpServers)) { + merged.set(name, config) + count++ + } + if (count > 0) { + serverCountsBySkill.set(skillName, count) + } + } + + // Emit aggregated summary log instead of per-item logs + if (serverCountsBySkill.size > 0 && logger == null) return merged + + const totalServers = [...serverCountsBySkill.values()].reduce((a, b) => a + b, 0) + logger?.debug('mcp servers collected', { + totalSkills: serverCountsBySkill.size, + totalServers, + bySkill: Object.fromEntries(serverCountsBySkill) + }) + return merged +} + +export function transformMcpServerMap(servers: Map, transformer: McpConfigTransformer): TransformedMcpConfig { + const result: TransformedMcpConfig = {} + + for (const [name, config] of servers) result[name] = transformer(config) + + return result +} + +/** + * MCP Config Manager + * Handles merging and writing MCP configurations from skills to various output formats + */ +export class McpConfigManager { + private readonly fs: typeof import('node:fs') + private readonly logger: ILogger + + constructor(deps: {fs: typeof import('node:fs'), logger: ILogger}) { + this.fs = deps.fs + this.logger = deps.logger + } + + collectMcpServers(skills: readonly SkillPrompt[]): Map { + return collectMcpServersFromSkills(skills, this.logger) + } + + transformMcpServers(servers: Map, transformer: McpConfigTransformer): TransformedMcpConfig { + return transformMcpServerMap(servers, transformer) + } + + readExistingConfig(configPath: string): Record { + try { + if (this.fs.existsSync(configPath)) { + const content = this.fs.readFileSync(configPath, 'utf8') + return JSON.parse(content) as Record + } + } catch (error) { + this.logger.warn( + buildFileOperationDiagnostic({ + code: 'MCP_CONFIG_READ_FAILED', + title: 'Failed to read existing MCP config', + operation: 'read', + targetKind: 'MCP config file', + path: configPath, + error, + details: { + fallback: 'starting fresh' + } + }) + ) + } + return {} + } + + writeCursorMcpConfig(configPath: string, servers: TransformedMcpConfig, dryRun: boolean): McpWriteResult { + const existingConfig = this.readExistingConfig(configPath) + const existingMcpServers = (existingConfig['mcpServers'] as Record) ?? {} + + existingConfig['mcpServers'] = {...existingMcpServers, ...servers} + const content = JSON.stringify(existingConfig, null, 2) + + return this.writeConfigFile(configPath, content, Object.keys(servers).length, dryRun) + } + + writeOpencodeMcpConfig(configPath: string, servers: TransformedMcpConfig, dryRun: boolean, additionalConfig?: Record): McpWriteResult { + const existingConfig = this.readExistingConfig(configPath) + + const mergedConfig = { + // Merge with additional config (like $schema, plugin array) + ...existingConfig, + ...additionalConfig, + mcp: servers + } + + const content = JSON.stringify(mergedConfig, null, 2) + return this.writeConfigFile(configPath, content, Object.keys(servers).length, dryRun) + } + + writeSkillMcpConfig(configPath: string, rawContent: string, dryRun: boolean): McpWriteResult { + return this.writeConfigFile(configPath, rawContent, 1, dryRun) + } + + private ensureDirectory(dir: string): void { + if (!this.fs.existsSync(dir)) this.fs.mkdirSync(dir, {recursive: true}) + } + + private writeConfigFile(configPath: string, content: string, serverCount: number, dryRun: boolean): McpWriteResult { + if (dryRun) { + this.logger.trace({ + action: 'dryRun', + type: 'mcpConfig', + path: configPath, + serverCount + }) + return {success: true, path: configPath, serverCount, skipped: true} + } + + try { + this.ensureDirectory(path.dirname(configPath)) + this.fs.writeFileSync(configPath, content) + this.logger.trace({ + action: 'write', + type: 'mcpConfig', + path: configPath, + serverCount + }) + return {success: true, path: configPath, serverCount} + } catch (error) { + const errMsg = error instanceof Error ? error.message : String(error) + this.logger.error( + buildFileOperationDiagnostic({ + code: 'MCP_CONFIG_WRITE_FAILED', + title: 'Failed to write MCP config', + operation: 'write', + targetKind: 'MCP config file', + path: configPath, + error: errMsg + }) + ) + return { + success: false, + path: configPath, + serverCount: 0, + error: error as Error + } + } + } +} + +/** + * Transform MCP config for Cursor format + * Keeps standard MCP structure with command/args/env or url/headers + */ +export function transformMcpConfigForCursor(config: McpServerConfig): Record { + const result: Record = {} + + if (config.command != null) { + result['command'] = config.command + if (config.args != null) result['args'] = config.args + if (config.env != null) result['env'] = config.env + return result + } + + const configRecord = config as unknown as Record + const url = configRecord['url'] ?? configRecord['serverUrl'] + + if (url == null) return result + + result['url'] = url + const {headers} = configRecord + if (headers != null) result['headers'] = headers + + return result +} + +/** + * Transform MCP config for Opencode format + * Converts to local (command array) or remote (url) format with enabled flag + */ +export function transformMcpConfigForOpencode(config: McpServerConfig): Record { + const result: Record = {} + + if (config.command != null) { + result['type'] = 'local' + const commandArray = [config.command] + if (config.args != null) commandArray.push(...config.args) + result['command'] = commandArray + if (config.env != null) result['environment'] = config.env + } else { + result['type'] = 'remote' + const configRecord = config as unknown as Record + if (configRecord['url'] != null) result['url'] = configRecord['url'] + else if (configRecord['serverUrl'] != null) { + result['url'] = configRecord['serverUrl'] + } + } + + result['enabled'] = config.disabled !== true + + return result +} diff --git a/sdk/src/plugins/plugin-core/OutputTypes.ts b/sdk/src/plugins/plugin-core/OutputTypes.ts new file mode 100644 index 00000000..64f1c147 --- /dev/null +++ b/sdk/src/plugins/plugin-core/OutputTypes.ts @@ -0,0 +1,145 @@ +import type {GlobalConfigDirectoryType} from './enums' +import type {SubAgentPrompt} from './InputTypes' +import type {AbsolutePath, RelativePath} from './PromptTypes' + +/** + * Global configuration based on user_home root directory + */ +export interface GlobalConfigDirectoryInUserHome { + readonly type: K + readonly directory: RelativePath +} + +/** + * Special, absolute path global memory prompt + */ +export interface GlobalConfigDirectoryInOther { + readonly type: K + readonly directory: AbsolutePath +} + +export type GlobalConfigDirectory = GlobalConfigDirectoryInUserHome | GlobalConfigDirectoryInOther + +export interface Target { + +} + +/** + * SubAgent frontmatter field mapping + * Value can be a static string or a function that extracts value from SubAgentPrompt + */ +export type SubAgentFrontMatterField = string | ((subAgent: SubAgentPrompt) => unknown) + +/** + * SubAgent output configuration for declarative configuration + */ +export interface SubAgentOutputConfig { + /** Output subdirectory name (relative to IDE config directory) */ + readonly subDir?: string + + /** File name format template */ + readonly fileNameTemplate?: 'prefix-agent' | 'prefix_agent' | 'agent' | string + + /** Whether to include series prefix */ + readonly includeSeriesPrefix?: boolean + + /** Series prefix separator */ + readonly seriesSeparator?: string + + /** Frontmatter configuration */ + readonly frontMatter?: { + /** Custom field mappings */ + readonly fields?: Record + /** Fields to exclude */ + readonly exclude?: string[] + } + + /** Content transformation options */ + readonly contentTransform?: { + /** Whether to transform MDX references to Markdown */ + readonly transformMdxRefs?: boolean + /** Custom content processor */ + readonly processor?: (content: string, subAgent: SubAgentPrompt) => string + } +} + +/** + * Generic registry data structure. + * All registry files must have version and lastUpdated fields. + */ +export interface RegistryData { + readonly version: string + readonly lastUpdated: string +} + +/** + * Result of a registry operation. + */ +export interface RegistryOperationResult { + readonly success: boolean + readonly entryName: string + readonly error?: Error +} + +/** + * Source information for a Kiro power. + * Indicates the origin type of a registered power. + */ +export interface KiroPowerSource { + readonly type: 'local' | 'repo' | 'registry' + readonly repoId?: string + readonly repoName?: string + readonly cloneId?: string +} + +/** + * A single power entry in the Kiro registry. + * Contains metadata about an installed power. + */ +export interface KiroPowerEntry { + readonly name: string + readonly description: string + readonly mcpServers?: readonly string[] + readonly author?: string + readonly keywords: readonly string[] + readonly displayName?: string + readonly installed: boolean + readonly installedAt?: string + readonly installPath?: string + readonly source: KiroPowerSource + readonly sourcePath?: string +} + +/** + * Repository source tracking in Kiro registry. + * Tracks the source/origin of registered items. + */ +export interface KiroRepoSource { + readonly name: string + readonly type: 'local' | 'git' + readonly enabled: boolean + readonly addedAt?: string + readonly powerCount: number + readonly path?: string + readonly lastSync?: string + readonly powers?: readonly string[] +} + +/** + * Kiro recommended repo metadata (preserved during updates). + */ +export interface KiroRecommendedRepo { + readonly url: string + readonly lastFetch: string + readonly powerCount: number +} + +/** + * Complete Kiro powers registry structure. + * Represents the full ~/.kiro/powers/registry.json file. + */ +export interface KiroPowersRegistry extends RegistryData { + readonly powers: Record + readonly repoSources: Record + readonly kiroRecommendedRepo?: KiroRecommendedRepo +} diff --git a/sdk/src/plugins/plugin-core/PromptArtifactCache.test.ts b/sdk/src/plugins/plugin-core/PromptArtifactCache.test.ts new file mode 100644 index 00000000..9708baf5 --- /dev/null +++ b/sdk/src/plugins/plugin-core/PromptArtifactCache.test.ts @@ -0,0 +1,203 @@ +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {afterEach, describe, expect, it, vi} from 'vitest' + +import { + clearPromptArtifactCache, + compileRawPromptArtifact, + readPromptArtifact +} from './PromptArtifactCache' + +const {mdxToMdMock, parseMarkdownMock} = vi.hoisted(() => ({ + mdxToMdMock: vi.fn(async (content: string) => ({ + content: `compiled:${content.trim()}`, + metadata: { + fields: { + compiled: true + } + } + })), + parseMarkdownMock: vi.fn((content: string) => { + const frontMatterMatch = /^---\n([\s\S]*?)\n---\n?([\s\S]*)$/u.exec(content) + + if (frontMatterMatch != null) { + const rawFrontMatter = `---\n${frontMatterMatch[1]}\n---` + const markdownContent = frontMatterMatch[2].trim() + + return { + yamlFrontMatter: { + title: 'frontmatter' + }, + rawFrontMatter, + contentWithoutFrontMatter: markdownContent, + markdownAst: { + type: 'root' + }, + markdownContents: [markdownContent] + } + } + + const trimmed = content.trim() + return { + yamlFrontMatter: void 0, + rawFrontMatter: void 0, + contentWithoutFrontMatter: trimmed, + markdownAst: { + type: 'root' + }, + markdownContents: [trimmed] + } + }) +})) + +vi.mock('@truenine/md-compiler', () => ({ + mdxToMd: mdxToMdMock +})) + +vi.mock('@truenine/md-compiler/markdown', () => ({ + parseMarkdown: parseMarkdownMock +})) + +afterEach(() => { + clearPromptArtifactCache() + vi.clearAllMocks() +}) + +describe('prompt artifact cache', () => { + it('caches repeated source prompt compilation by file mtime', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-prompt-cache-source-')) + const filePath = path.join(tempDir, 'prompt.src.mdx') + + try { + fs.writeFileSync(filePath, 'Hello prompt', 'utf8') + + const first = await readPromptArtifact(filePath, { + mode: 'source' + }) + const second = await readPromptArtifact(filePath, { + mode: 'source' + }) + + expect(first.content).toBe('compiled:Hello prompt') + expect(second.content).toBe('compiled:Hello prompt') + expect(mdxToMdMock).toHaveBeenCalledTimes(1) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('reads export-default dist artifacts without recompiling', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-prompt-cache-dist-')) + const filePath = path.join(tempDir, 'prompt.mdx') + + try { + fs.writeFileSync(filePath, [ + 'export default {', + ' description: \'dist description\',', + ' version: \'1.0.0\'', + '}', + '', + 'Compiled body', + '' + ].join('\n'), 'utf8') + + const artifact = await readPromptArtifact(filePath, { + mode: 'dist' + }) + + expect(artifact.content).toBe('Compiled body') + expect(artifact.metadata).toEqual({ + description: 'dist description', + version: '1.0.0' + }) + expect(mdxToMdMock).not.toHaveBeenCalled() + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('still compiles frontmatter dist artifacts so MDX body syntax is resolved', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-prompt-cache-frontmatter-dist-')) + const filePath = path.join(tempDir, 'prompt.mdx') + + try { + fs.writeFileSync(filePath, [ + '---', + 'title: demo', + '---', + '', + 'Hello {profile.name}', + '' + ].join('\n'), 'utf8') + + const artifact = await readPromptArtifact(filePath, { + mode: 'dist' + }) + + expect(artifact.content).toContain('compiled:') + expect(artifact.metadata).toEqual({ + compiled: true + }) + expect(mdxToMdMock).toHaveBeenCalledTimes(1) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('falls back to mdx compilation when export-default metadata is not JSON5-compatible', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-prompt-cache-dist-fallback-')) + const filePath = path.join(tempDir, 'prompt.mdx') + + try { + fs.writeFileSync(filePath, [ + 'export default {', + ' description: `template literal metadata`,', + '}', + '', + 'Compiled body', + '' + ].join('\n'), 'utf8') + + const artifact = await readPromptArtifact(filePath, { + mode: 'dist' + }) + + expect(artifact.content).toContain('compiled:export default') + expect(mdxToMdMock).toHaveBeenCalledTimes(1) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('caches raw prompt recompilation for identical tool preset inputs', async () => { + const resultA = await compileRawPromptArtifact({ + filePath: '/tmp/command.mdx', + rawMdx: 'Tool preset body', + cacheMtimeMs: 42, + globalScope: { + tool: { + preset: 'demo' + } + } as never + }) + const resultB = await compileRawPromptArtifact({ + filePath: '/tmp/command.mdx', + rawMdx: 'Tool preset body', + cacheMtimeMs: 42, + globalScope: { + tool: { + preset: 'demo' + } + } as never + }) + + expect(resultA.content).toBe('compiled:Tool preset body') + expect(resultB.content).toBe('compiled:Tool preset body') + expect(mdxToMdMock).toHaveBeenCalledTimes(1) + }) +}) diff --git a/sdk/src/plugins/plugin-core/PromptArtifactCache.ts b/sdk/src/plugins/plugin-core/PromptArtifactCache.ts new file mode 100644 index 00000000..2ad98dfe --- /dev/null +++ b/sdk/src/plugins/plugin-core/PromptArtifactCache.ts @@ -0,0 +1,317 @@ +import type {MdxGlobalScope} from '@truenine/md-compiler/globals' +import type {ParsedMarkdown} from '@truenine/md-compiler/markdown' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {mdxToMd} from '@truenine/md-compiler' +import {parseMarkdown} from '@truenine/md-compiler/markdown' +import JSON5 from 'json5' + +export interface PromptArtifact { + readonly rawMdx: string + readonly parsed: ParsedMarkdown + readonly content: string + readonly metadata: Record + readonly lastModified: Date +} + +export interface ReadPromptArtifactOptions { + readonly mode: 'source' | 'dist' + readonly globalScope?: MdxGlobalScope | undefined + readonly rawMdx?: string | undefined + readonly lastModified?: Date | undefined +} + +export interface CompileRawPromptArtifactOptions { + readonly filePath: string + readonly globalScope?: MdxGlobalScope | undefined + readonly rawMdx: string + readonly cacheMtimeMs?: number | undefined +} + +export interface RawPromptCompilation { + readonly content: string + readonly metadata: Record +} + +interface CachedPromptArtifactValue { + readonly artifact: PromptArtifact + readonly stamp: number +} + +const promptArtifactCache = new Map>() +const rawPromptCompilationCache = new Map>() +const EXPORT_DEFAULT_PREFIX_PATTERN = /^export\s+default\s*/u + +function normalizeForCache(value: unknown): unknown { + if (value == null || typeof value !== 'object') { + return value + } + + if (Array.isArray(value)) { + return value.map(normalizeForCache) + } + + const normalizedEntries = Object.entries(value as Record) + .sort(([leftKey], [rightKey]) => leftKey.localeCompare(rightKey)) + .map(([key, nestedValue]) => [key, normalizeForCache(nestedValue)] as const) + return Object.fromEntries(normalizedEntries) +} + +function stableSerialize(value: unknown): string { + return JSON.stringify(normalizeForCache(value)) +} + +function buildArtifactCacheKey( + filePath: string, + stamp: number, + options: ReadPromptArtifactOptions +): string { + return [ + path.resolve(filePath), + stamp, + options.mode, + stableSerialize(options.globalScope ?? {}) + ].join('::') +} + +function buildRawCompilationCacheKey( + options: CompileRawPromptArtifactOptions +): string { + return [ + path.resolve(options.filePath), + options.cacheMtimeMs ?? options.rawMdx.length, + stableSerialize(options.globalScope ?? {}), + stableSerialize(options.rawMdx) + ].join('::') +} + +function trimMetadataPrefix(content: string): string { + return content.replace(/^\s*;?\s*/u, '').trim() +} + +function isRecord(value: unknown): value is Record { + return value != null && typeof value === 'object' && !Array.isArray(value) +} + +function extractObjectLiteral(source: string, startIndex: number): {value: string, endIndex: number} | null { + if (source[startIndex] !== '{') { + return null + } + + let depth = 0 + let inString: string | undefined + let escaped = false + let inLineComment = false + let inBlockComment = false + + for (let index = startIndex; index < source.length; index++) { + const current = source[index] + const next = source[index + 1] + + if (current == null) { + break + } + + if (inLineComment) { + if (current === '\n') { + inLineComment = false + } + continue + } + + if (inBlockComment) { + if (current === '*' && next === '/') { + inBlockComment = false + index++ + } + continue + } + + if (escaped) { + escaped = false + continue + } + + if (inString != null) { + if (current === '\\') { + escaped = true + continue + } + if (current === inString) { + inString = void 0 + } + continue + } + + if (current === '"' || current === '\'' || current === '`') { + inString = current + continue + } + + if (current === '/' && next === '/') { + inLineComment = true + index++ + continue + } + + if (current === '/' && next === '*') { + inBlockComment = true + index++ + continue + } + + if (current === '{') { + depth++ + continue + } + + if (current !== '}') { + continue + } + + depth-- + if (depth === 0) { + return { + value: source.slice(startIndex, index + 1), + endIndex: index + 1 + } + } + } + + return null +} + +function tryReadFastDistArtifact( + rawMdx: string +): {content: string, metadata: Record} | null { + const trimmed = rawMdx.trimStart() + + // Frontmatter and plain markdown dist prompts still need mdxToMd because the body + // may contain unresolved MDX expressions or components. + const prefixMatch = EXPORT_DEFAULT_PREFIX_PATTERN.exec(trimmed) + if (prefixMatch == null) return null + + const objectStartIndex = prefixMatch[0].length + const objectLiteral = extractObjectLiteral(trimmed, objectStartIndex) + if (objectLiteral == null) { + return null + } + + let metadata: unknown + try { + metadata = JSON5.parse(objectLiteral.value) + } + catch { + return null + } + + if (!isRecord(metadata)) { + return null + } + + return { + content: trimMetadataPrefix(trimmed.slice(objectLiteral.endIndex)), + metadata + } +} + +async function buildPromptArtifact( + filePath: string, + options: ReadPromptArtifactOptions +): Promise { + const rawMdx = options.rawMdx ?? fs.readFileSync(filePath, 'utf8') + const lastModified = options.lastModified ?? fs.statSync(filePath).mtime + const parsed = parseMarkdown(rawMdx) + + if (options.mode === 'dist') { + const fastDistArtifact = tryReadFastDistArtifact(rawMdx) + if (fastDistArtifact != null) { + return { + rawMdx, + parsed, + content: fastDistArtifact.content, + metadata: fastDistArtifact.metadata, + lastModified + } + } + } + + const compileResult = await mdxToMd(rawMdx, { + globalScope: options.globalScope, + extractMetadata: true, + basePath: path.dirname(filePath), + filePath + }) + + return { + rawMdx, + parsed, + content: compileResult.content, + metadata: compileResult.metadata.fields, + lastModified + } +} + +export async function readPromptArtifact( + filePath: string, + options: ReadPromptArtifactOptions +): Promise { + const lastModified = options.lastModified ?? fs.statSync(filePath).mtime + const stamp = lastModified.getTime() + const cacheKey = buildArtifactCacheKey(filePath, stamp, options) + const cached = promptArtifactCache.get(cacheKey) + if (cached != null) { + return (await cached).artifact + } + + const pendingArtifact = buildPromptArtifact(filePath, { + ...options, + lastModified + }).then(artifact => ({ + artifact, + stamp + })) + promptArtifactCache.set(cacheKey, pendingArtifact) + + try { + return (await pendingArtifact).artifact + } + catch (error) { + promptArtifactCache.delete(cacheKey) + throw error + } +} + +export async function compileRawPromptArtifact( + options: CompileRawPromptArtifactOptions +): Promise { + const cacheKey = buildRawCompilationCacheKey(options) + const cached = rawPromptCompilationCache.get(cacheKey) + if (cached != null) { + return cached + } + + const pendingCompilation = mdxToMd(options.rawMdx, { + globalScope: options.globalScope, + extractMetadata: true, + basePath: path.dirname(options.filePath), + filePath: options.filePath + }).then(result => ({ + content: result.content, + metadata: result.metadata.fields + })) + rawPromptCompilationCache.set(cacheKey, pendingCompilation) + + try { + return await pendingCompilation + } + catch (error) { + rawPromptCompilationCache.delete(cacheKey) + throw error + } +} + +export function clearPromptArtifactCache(): void { + promptArtifactCache.clear() + rawPromptCompilationCache.clear() +} diff --git a/sdk/src/plugins/plugin-core/PromptCompilerDiagnostics.test.ts b/sdk/src/plugins/plugin-core/PromptCompilerDiagnostics.test.ts new file mode 100644 index 00000000..f4989728 --- /dev/null +++ b/sdk/src/plugins/plugin-core/PromptCompilerDiagnostics.test.ts @@ -0,0 +1,47 @@ +import * as path from 'node:path' +import {UndefinedNamespaceError} from '@truenine/md-compiler/errors' +import {describe, expect, it} from 'vitest' +import { + formatPromptCompilerDiagnostic, + resolveSourcePathForDistFile +} from './PromptCompilerDiagnostics' + +describe('prompt compiler diagnostics', () => { + it('formats prompt-aware compiler diagnostics with dist and src paths', () => { + const error = new UndefinedNamespaceError('TODO', 'TODO', { + filePath: path.join('C:', 'repo', 'aindex', 'dist', 'skills', 'demo', 'examples', 'guide.mdx'), + sourceText: 'never leave placeholders or "{TODO}" markers', + position: { + start: {line: 1, column: 30, offset: 29}, + end: {line: 1, column: 36, offset: 35} + }, + nodeType: 'mdxTextExpression' + }) + + const message = formatPromptCompilerDiagnostic(error, { + operation: 'Failed to compile skill child doc.', + promptKind: 'skill-child-doc', + logicalName: 'demo/examples/guide', + entryDistPath: path.join('C:', 'repo', 'aindex', 'dist', 'skills', 'demo', 'skill.mdx'), + distPath: path.join('C:', 'repo', 'aindex', 'dist', 'skills', 'demo', 'examples', 'guide.mdx'), + srcPath: path.join('C:', 'repo', 'aindex', 'skills', 'demo', 'examples', 'guide.src.mdx') + }) + + expect(message).toContain('prompt kind: skill-child-doc') + expect(message).toContain('logical name: demo/examples/guide') + expect(message).toContain('entry dist file:') + expect(message).toContain('dist file:') + expect(message).toContain('src file:') + expect(message).toContain('location: 1:30-1:36') + expect(message).toContain('source line: never leave placeholders or "{TODO}" markers') + }) + + it('maps nested dist child docs back to src child docs', () => { + const mapped = resolveSourcePathForDistFile(path, path.join('C:', 'repo', 'aindex', 'dist', 'skills', 'demo', 'examples', 'guide.mdx'), { + distRootDir: path.join('C:', 'repo', 'aindex', 'dist', 'skills', 'demo'), + srcRootDir: path.join('C:', 'repo', 'aindex', 'skills', 'demo') + }) + + expect(mapped).toBe(path.join('C:', 'repo', 'aindex', 'skills', 'demo', 'examples', 'guide.src.mdx')) + }) +}) diff --git a/sdk/src/plugins/plugin-core/PromptCompilerDiagnostics.ts b/sdk/src/plugins/plugin-core/PromptCompilerDiagnostics.ts new file mode 100644 index 00000000..e36ed182 --- /dev/null +++ b/sdk/src/plugins/plugin-core/PromptCompilerDiagnostics.ts @@ -0,0 +1,65 @@ +import {CompilerDiagnosticError, formatCompilerDiagnostic} from '@truenine/md-compiler/errors' + +export interface PromptCompilerDiagnosticContext { + readonly promptKind: string + readonly logicalName: string + readonly distPath?: string | undefined + readonly entryDistPath?: string | undefined + readonly srcPath?: string | undefined + readonly operation?: string | undefined +} + +export interface SourceMappingOptions { + readonly preferredSourcePath?: string | undefined + readonly distRootDir?: string | undefined + readonly srcRootDir?: string | undefined +} + +export function resolveSourcePathForDistFile( + path: typeof import('node:path'), + distFilePath: string | undefined, + options: SourceMappingOptions = {} +): string | undefined { + const {preferredSourcePath, distRootDir, srcRootDir} = options + if (distFilePath == null || distFilePath.length === 0) return preferredSourcePath + if (preferredSourcePath != null && preferredSourcePath.length > 0) return preferredSourcePath + if (distRootDir == null || srcRootDir == null) return void 0 + + const relativePath = path.relative(distRootDir, distFilePath) + if (relativePath.startsWith('..') || path.isAbsolute(relativePath)) return void 0 + + return path.join(srcRootDir, relativePath.replace(/\.mdx$/u, '.src.mdx')) +} + +export function getDiagnosticFilePath(error: unknown): string | undefined { + if (error instanceof CompilerDiagnosticError && error.filePath != null) return error.filePath + if (!(error instanceof Error) || !('filePath' in error)) return void 0 + + const {filePath} = error as Error & {filePath?: unknown} + if (typeof filePath === 'string' && filePath.length > 0) return filePath + return void 0 +} + +export function formatPromptCompilerDiagnostic( + error: unknown, + context: PromptCompilerDiagnosticContext +): string { + const diagnosticFilePath = getDiagnosticFilePath(error) + const distPath = diagnosticFilePath ?? context.distPath + const lines = [ + context.operation ?? 'Prompt compilation failed.', + `prompt kind: ${context.promptKind}`, + `logical name: ${context.logicalName}` + ] + + if (context.entryDistPath != null && context.entryDistPath.length > 0 && context.entryDistPath !== distPath) { + lines.push(`entry dist file: ${context.entryDistPath}`) + } + + if (distPath != null && distPath.length > 0) lines.push(`dist file: ${distPath}`) + lines.push(`src file: ${context.srcPath ?? ''}`) + lines.push('diagnostic:') + lines.push(error instanceof Error ? formatCompilerDiagnostic(error) : String(error)) + + return lines.join('\n') +} diff --git a/sdk/src/plugins/plugin-core/PromptIdentity.ts b/sdk/src/plugins/plugin-core/PromptIdentity.ts new file mode 100644 index 00000000..f7ccf0ee --- /dev/null +++ b/sdk/src/plugins/plugin-core/PromptIdentity.ts @@ -0,0 +1,59 @@ +import type {SkillPrompt, SubAgentPrompt} from './types' + +function normalizePromptPath(value: string): string { + return value + .replaceAll('\\', '/') + .replaceAll(/^\/+|\/+$/gu, '') +} + +export function flattenPromptPath(value: string): string { + const normalized = normalizePromptPath(value) + if (normalized.length === 0) return '' + + return normalized + .split('/') + .filter(segment => segment.length > 0) + .join('-') +} + +export function deriveSubAgentIdentity(relativeName: string): { + readonly agentPrefix?: string + readonly agentName: string + readonly canonicalName: string +} { + const normalizedName = normalizePromptPath(relativeName) + const segments = normalizedName + .split('/') + .filter(segment => segment.length > 0) + + const agentName = segments.at(-1) ?? normalizedName + const prefixSegments = segments.slice(0, -1) + const canonicalName = flattenPromptPath(normalizedName) + const agentPrefix = prefixSegments.length > 0 + ? prefixSegments.join('-') + : void 0 + + return { + ...agentPrefix != null && {agentPrefix}, + agentName, + canonicalName: canonicalName.length > 0 ? canonicalName : agentName + } +} + +export function resolveSkillName(skill: Pick): string { + return skill.skillName.trim().length > 0 + ? skill.skillName + : skill.dir.getDirectoryName() +} + +export function resolveSubAgentCanonicalName( + subAgent: Pick +): string { + if (subAgent.canonicalName.trim().length > 0) return subAgent.canonicalName + + const fallback = subAgent.agentPrefix != null && subAgent.agentPrefix.length > 0 + ? `${subAgent.agentPrefix}-${subAgent.agentName}` + : subAgent.agentName + + return flattenPromptPath(fallback) +} diff --git a/sdk/src/plugins/plugin-core/PromptTypes.ts b/sdk/src/plugins/plugin-core/PromptTypes.ts new file mode 100644 index 00000000..03de03ff --- /dev/null +++ b/sdk/src/plugins/plugin-core/PromptTypes.ts @@ -0,0 +1,184 @@ +import type {Root, RootContent} from '@truenine/md-compiler' +import type {ClaudeCodeCLISubAgentColors, CodingAgentTools, FilePathKind, NamingCaseKind, PromptKind, RuleScope} from './enums' +import type {GlobalConfigDirectory} from './OutputTypes' + +/** Common directory representation */ +export interface Path { + readonly pathKind: K + readonly path: string + readonly getDirectoryName: () => string +} + +/** Relative path directory */ +export interface RelativePath extends Path { + readonly basePath: string + getAbsolutePath: () => string +} + +/** Absolute path directory */ +export type AbsolutePath = Path + +/** Root path directory */ +export type RootPath = Path + +export interface FileContent< + C = unknown, + FK extends FilePathKind = FilePathKind.Relative, + F extends Path = RelativePath +> { + content: C + length: number + filePathKind: FK + dir: F + charsetEncoding?: BufferEncoding +} + +/** + * Prompt + */ +export interface Prompt< + T extends PromptKind = PromptKind, + Y extends YAMLFrontMatter = YAMLFrontMatter, + DK extends FilePathKind = FilePathKind.Relative, + D extends Path = RelativePath, + C = unknown +> extends FileContent { + readonly type: T + readonly yamlFrontMatter?: Y + readonly rawFrontMatter?: string + readonly markdownAst?: Root + readonly markdownContents: readonly RootContent[] + readonly dir: D +} + +export interface YAMLFrontMatter extends Record { + readonly namingCase: N +} + +/** + * Series name type - used across multiple prompt types + * Extracted to avoid repetition and ensure consistency + */ +export type SeriName = string | string[] | null + +export interface CommonYAMLFrontMatter extends YAMLFrontMatter { + readonly description: string +} + +export interface ToolAwareYAMLFrontMatter extends CommonYAMLFrontMatter { + readonly allowTools?: (CodingAgentTools | string)[] + readonly argumentHint?: string +} + +/** + * Memory prompt working on project root directory + */ +export interface ProjectRootMemoryPrompt extends Prompt< + PromptKind.ProjectRootMemory, + YAMLFrontMatter, + FilePathKind.Relative, + RootPath +> { + readonly type: PromptKind.ProjectRootMemory +} + +/** + * Memory prompt working on project subdirectory + */ +export interface ProjectChildrenMemoryPrompt extends Prompt { + readonly type: PromptKind.ProjectChildrenMemory + readonly workingChildDirectoryPath: RelativePath +} + +export interface SubAgentYAMLFrontMatter extends ToolAwareYAMLFrontMatter { + readonly model?: string + readonly color?: ClaudeCodeCLISubAgentColors | string + readonly seriName?: SeriName + readonly scope?: RuleScope +} + +export interface CommandYAMLFrontMatter extends ToolAwareYAMLFrontMatter { + readonly seriName?: SeriName + readonly scope?: RuleScope +} // description, argumentHint, allowTools inherited from ToolAwareYAMLFrontMatter + +/** + * Base YAML front matter for all skill types + */ +export interface SkillsYAMLFrontMatter extends CommonYAMLFrontMatter { + readonly name?: string +} + +export interface SkillYAMLFrontMatter extends SkillsYAMLFrontMatter { + readonly allowTools?: (CodingAgentTools | string)[] + readonly keywords?: readonly string[] + readonly displayName?: string + readonly author?: string + readonly version?: string + readonly seriName?: SeriName + readonly scope?: RuleScope +} + +/** + * Codex skill metadata field + * Follows Agent Skills specification: https://agentskills.io/specification + * + * The metadata field is an arbitrary key-value mapping for additional metadata. + * Common fields include displayName, version, author, keywords, etc. + */ +export interface CodexSkillMetadata { + readonly 'short-description'?: string + readonly 'displayName'?: string + readonly 'version'?: string + readonly 'author'?: string + readonly 'keywords'?: readonly string[] + readonly 'category'?: string + readonly 'repository'?: string + readonly [key: string]: unknown +} + +export interface CodexSkillYAMLFrontMatter extends SkillsYAMLFrontMatter { + readonly 'license'?: string + readonly 'compatibility'?: string + readonly 'metadata'?: CodexSkillMetadata + readonly 'allowed-tools'?: string +} + +/** + * Kiro steering file front matter + * @see https://kiro.dev/docs/steering + */ +export interface KiroSteeringYAMLFrontMatter extends YAMLFrontMatter { + readonly inclusion?: 'always' | 'fileMatch' | 'manual' + readonly fileMatchPattern?: string +} + +/** + * Kiro Power POWER.md front matter + * @see https://kiro.dev/docs/powers + */ +export interface KiroPowerYAMLFrontMatter extends SkillsYAMLFrontMatter { + readonly displayName?: string + readonly keywords?: readonly string[] + readonly author?: string +} + +/** + * Rule YAML front matter with glob patterns and scope + */ +export interface RuleYAMLFrontMatter extends CommonYAMLFrontMatter { + readonly globs: readonly string[] + readonly scope?: RuleScope + readonly seriName?: SeriName +} + +/** + * Global memory prompt + * Single output target + */ +export interface GlobalMemoryPrompt extends Prompt< + PromptKind.GlobalMemory +> { + readonly type: PromptKind.GlobalMemory + readonly parentDirectoryPath: GlobalConfigDirectory +} diff --git a/sdk/src/plugins/plugin-core/RegistryWriter.ts b/sdk/src/plugins/plugin-core/RegistryWriter.ts new file mode 100644 index 00000000..4e74cd69 --- /dev/null +++ b/sdk/src/plugins/plugin-core/RegistryWriter.ts @@ -0,0 +1,179 @@ +/** + * Registry Configuration Writer + * + * Abstract base class for registry configuration writers. + * Provides common functionality for reading, writing, and merging JSON registry files. + * + * @see Requirements 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 7.1, 7.2 + */ + +import type {ILogger, RegistryData, RegistryOperationResult} from './types' + +import * as fs from 'node:fs' +import * as path from 'node:path' +import {createLogger} from '@truenine/logger' +import { + buildDiagnostic, + buildFileOperationDiagnostic, + diagnosticLines +} from '@/diagnostics' +import {resolveUserPath} from '@/runtime-environment' + +/** + * Abstract base class for registry configuration writers. + * Provides common functionality for reading, writing, and merging JSON registry files. + * + * @template TEntry - The type of entries stored in the registry + * @template TRegistry - The full registry data structure type + * + * @see Requirements 1.1, 1.2, 1.3, 1.7 + */ +export abstract class RegistryWriter< + TEntry, + TRegistry extends RegistryData = RegistryData +> { + protected readonly registryPath: string + + protected readonly log: ILogger + + protected constructor(registryPath: string, logger?: ILogger) { + this.registryPath = this.resolvePath(registryPath) + this.log = logger ?? createLogger(this.constructor.name) + } + + protected resolvePath(p: string): string { + if (p.startsWith('~')) return resolveUserPath(p) + return path.resolve(p) + } + + protected getRegistryDir(): string { + return path.dirname(this.registryPath) + } + + protected ensureRegistryDir(): void { + const dir = this.getRegistryDir() + if (!fs.existsSync(dir)) fs.mkdirSync(dir, {recursive: true}) + } + + read(): TRegistry { + if (!fs.existsSync(this.registryPath)) { + this.log.debug('registry not found', {path: this.registryPath}) + return this.createInitialRegistry() + } + + try { + const content = fs.readFileSync(this.registryPath, 'utf8') + return JSON.parse(content) as TRegistry + } + catch (error) { + const errMsg = error instanceof Error ? error.message : String(error) + this.log.error(buildFileOperationDiagnostic({ + code: 'REGISTRY_READ_FAILED', + title: 'Failed to read registry file', + operation: 'read', + targetKind: 'registry file', + path: this.registryPath, + error: errMsg + })) + return this.createInitialRegistry() + } + } + + protected write(data: TRegistry, dryRun?: boolean): boolean { + const updatedData = { // Update lastUpdated timestamp + ...data, + lastUpdated: new Date().toISOString() + } as TRegistry + + if (dryRun === true) { + this.log.trace({action: 'dryRun', type: 'registry', path: this.registryPath}) + return true + } + + const tempPath = `${this.registryPath}.tmp.${Date.now()}` + + try { + this.ensureRegistryDir() + + const content = JSON.stringify(updatedData, null, 2) // Write to temporary file first + fs.writeFileSync(tempPath, content, 'utf8') + + fs.renameSync(tempPath, this.registryPath) // Atomic rename to replace target + + this.log.trace({action: 'write', type: 'registry', path: this.registryPath}) + return true + } + catch (error) { + const errMsg = error instanceof Error ? error.message : String(error) + this.log.error(buildFileOperationDiagnostic({ + code: 'REGISTRY_WRITE_FAILED', + title: 'Failed to write registry file', + operation: 'write', + targetKind: 'registry file', + path: this.registryPath, + error: errMsg + })) + + try { // Cleanup temp file if it exists + if (fs.existsSync(tempPath)) fs.unlinkSync(tempPath) + } + catch { + } // Ignore cleanup errors + + return false + } + } + + register( + entries: readonly TEntry[], + dryRun?: boolean + ): readonly RegistryOperationResult[] { + const results: RegistryOperationResult[] = [] + + const existing = this.read() // Read existing registry + + const merged = this.merge(existing, entries) // Merge new entries + + const writeSuccess = this.write(merged, dryRun) // Write updated registry + + for (const entry of entries) { // Build results for each entry + const entryName = this.getEntryName(entry) + if (writeSuccess) { + results.push({success: true, entryName}) + if (dryRun === true) this.log.trace({action: 'dryRun', type: 'registerEntry', entryName}) + else this.log.trace({action: 'register', type: 'entry', entryName}) + } else { + results.push({success: false, entryName, error: new Error(`Failed to write registry file`)}) + this.log.error(buildDiagnostic({ + code: 'REGISTRY_ENTRY_REGISTRATION_FAILED', + title: `Failed to register registry entry: ${entryName}`, + rootCause: diagnosticLines( + `tnmsc could not persist the registry entry "${entryName}" because the registry write step failed.` + ), + exactFix: diagnosticLines( + 'Fix the registry path permissions or invalid on-disk state, then rerun tnmsc.' + ), + details: { + entryName, + registryPath: this.registryPath + } + })) + } + } + + return results + } + + protected generateEntryId(prefix?: string): string { + const timestamp = Date.now() + const random = Math.random().toString(36).slice(2, 8) + const id = `${timestamp}-${random}` + return prefix != null ? `${prefix}-${id}` : id + } + + protected abstract getEntryName(entry: TEntry): string + + protected abstract merge(existing: TRegistry, entries: readonly TEntry[]): TRegistry + + protected abstract createInitialRegistry(): TRegistry +} diff --git a/sdk/src/plugins/plugin-core/constants.ts b/sdk/src/plugins/plugin-core/constants.ts new file mode 100644 index 00000000..63078971 --- /dev/null +++ b/sdk/src/plugins/plugin-core/constants.ts @@ -0,0 +1,113 @@ +import type {UserConfigFile} from './ConfigTypes.schema' + +export const PathPlaceholders = { + USER_HOME: '~', + WORKSPACE: '$WORKSPACE' +} as const + +type DefaultUserConfig = Readonly>> +export const DEFAULT_USER_CONFIG = {} as DefaultUserConfig + +export const PLUGIN_NAMES = { + AgentsOutput: 'AgentsOutputPlugin', + GeminiCLIOutput: 'GeminiCLIOutputPlugin', + CursorOutput: 'CursorOutputPlugin', + WindsurfOutput: 'WindsurfOutputPlugin', + ClaudeCodeCLIOutput: 'ClaudeCodeCLIOutputPlugin', + KiroIDEOutput: 'KiroCLIOutputPlugin', + OpencodeCLIOutput: 'OpencodeCLIOutputPlugin', + OpenAICodexCLIOutput: 'CodexCLIOutputPlugin', + DroidCLIOutput: 'DroidCLIOutputPlugin', + WarpIDEOutput: 'WarpIDEOutputPlugin', + TraeIDEOutput: 'TraeIDEOutputPlugin', + TraeCNIDEOutput: 'TraeCNIDEOutputPlugin', + QoderIDEOutput: 'QoderIDEPluginOutputPlugin', + JetBrainsCodeStyleOutput: 'JetBrainsIDECodeStyleConfigOutputPlugin', + JetBrainsAICodexOutput: 'JetBrainsAIAssistantCodexOutputPlugin', + AgentSkillsCompactOutput: 'GenericSkillsOutputPlugin', + GitExcludeOutput: 'GitExcludeOutputPlugin', + ReadmeOutput: 'ReadmeMdConfigFileOutputPlugin', + VSCodeOutput: 'VisualStudioCodeIDEConfigOutputPlugin', + ZedOutput: 'ZedIDEConfigOutputPlugin', + EditorConfigOutput: 'EditorConfigOutputPlugin', + AntigravityOutput: 'AntigravityOutputPlugin' +} as const + +export type PluginName = (typeof PLUGIN_NAMES)[keyof typeof PLUGIN_NAMES] + +export const WORKSPACE_ROOT_PROJECT_NAME = '__workspace__' + +/** + * Constants for output plugins. + */ +export const OutputFileNames = { + SKILL: 'SKILL.md', + CURSOR_GLOBAL_RULE: 'global.mdc', + CURSOR_PROJECT_RULE: 'always.md', + MCP_CONFIG: 'mcp.json', + CLAUDE_MEMORY: 'CLAUDE.md', + WINDSURF_GLOBAL_RULE: 'global_rules.md' +} as const + +export const OutputPrefixes = { + RULE: 'rule-', + CHILD_RULE: 'glob-' +} as const + +export const OutputSubdirectories = { + RULES: 'rules', + COMMANDS: 'commands', + SKILLS: 'skills', + AGENTS: 'agents', + CURSOR_SKILLS: 'skills-cursor' +} as const + +export const FrontMatterFields = { + ALWAYS_APPLY: 'alwaysApply', + GLOBS: 'globs', + DESCRIPTION: 'description', + NAME: 'name', + TRIGGER: 'trigger' +} as const + +export const FileExtensions = { + MD: '.md', + MDC: '.mdc', + MDX: '.mdx', + JSON: '.json' +} as const + +export const SourcePromptExtensions = { + PRIMARY: '.src.mdx' +} as const + +export const SourcePromptFileExtensions = [SourcePromptExtensions.PRIMARY] as const + +export const SourceLocaleExtensions = { + zh: SourcePromptFileExtensions, + en: FileExtensions.MDX +} as const + +export function hasSourcePromptExtension(fileName: string): boolean { + return SourcePromptFileExtensions.some(extension => fileName.endsWith(extension)) +} + +export const GlobalConfigDirs = { + CURSOR: '.cursor', + CLAUDE: '.claude', + WINDSURF: '.codeium/windsurf', + WINDSURF_RULES: '.windsurf' +} as const + +export const IgnoreFiles = { + CURSOR: '.cursorignore', + WINDSURF: '.codeiumignore' +} as const + +export const PreservedSkills = { + CURSOR: new Set(['create-rule', 'create-skill', 'create-subagent', 'migrate-to-skills', 'update-cursor-settings']) +} as const + +export const ToolPresets = { + CLAUDE_CODE: 'claudeCode' +} as const diff --git a/sdk/src/plugins/plugin-core/enums.ts b/sdk/src/plugins/plugin-core/enums.ts new file mode 100644 index 00000000..d146d369 --- /dev/null +++ b/sdk/src/plugins/plugin-core/enums.ts @@ -0,0 +1,53 @@ +export enum PluginKind { + Output = 'output' +} + +export enum PromptKind { + GlobalMemory = 'globalMemory', + ProjectRootMemory = 'projectRootMemory', + ProjectChildrenMemory = 'projectChildrenMemory', + Command = 'command', + SubAgent = 'subAgent', + Skill = 'skill', + SkillChildDoc = 'skillChildDoc', + SkillResource = 'skillResource', + SkillMcpConfig = 'skillMcpConfig', + Readme = 'readme', + Rule = 'rule' +} + +export type RuleScope = 'project' | 'global' + +export enum FilePathKind { + Relative = 'relative', + Absolute = 'absolute', + Root = 'root' +} + +export enum IDEKind { + VSCode = 'vscode', + Zed = 'zed', + IntellijIDEA = 'intellijIdea', + Git = 'git', + EditorConfig = 'editorconfig', + Original = 'original' +} + +export enum NamingCaseKind { + CamelCase = 'camelCase', + PascalCase = 'pascalCase', + SnakeCase = 'snakeCase', + KebabCase = 'kebabCase', + UpperCase = 'upperCase', + LowerCase = 'lowerCase', + Original = 'original' +} + +export enum GlobalConfigDirectoryType { + UserHome = 'userHome', + External = 'external' +} + +export type CodingAgentTools = string + +export type ClaudeCodeCLISubAgentColors = string diff --git a/sdk/src/plugins/plugin-core/filters.ts b/sdk/src/plugins/plugin-core/filters.ts new file mode 100644 index 00000000..0cec8122 --- /dev/null +++ b/sdk/src/plugins/plugin-core/filters.ts @@ -0,0 +1,261 @@ +import type {ProjectConfig, RulePrompt, SeriName} from './types' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {getNativeBinding} from '@/core/native-binding' + +interface SeriesFilterFns { + readonly resolveEffectiveIncludeSeries: (topLevel?: readonly string[], typeSpecific?: readonly string[]) => string[] + readonly matchesSeries: (seriName: string | readonly string[] | null | undefined, effectiveIncludeSeries: readonly string[]) => boolean + readonly resolveSubSeries: ( + topLevel?: Readonly>, + typeSpecific?: Readonly> + ) => Record +} + +let seriesFilterFnsCache: SeriesFilterFns | undefined + +function getSeriesFilterFns(): SeriesFilterFns { + if (seriesFilterFnsCache != null) return seriesFilterFnsCache + + const candidate = getNativeBinding() + if (candidate == null) { + throw new TypeError('Native series-filter binding is required. Build or install the Rust NAPI package before running tnmsc.') + } + if ( + typeof candidate.matchesSeries !== 'function' + || typeof candidate.resolveEffectiveIncludeSeries !== 'function' + || typeof candidate.resolveSubSeries !== 'function' + ) { + throw new TypeError('Native series-filter binding is incomplete. Rebuild the Rust NAPI package before running tnmsc.') + } + seriesFilterFnsCache = candidate + return candidate +} + +function resolveEffectiveIncludeSeries(topLevel?: readonly string[], typeSpecific?: readonly string[]): string[] { + return getSeriesFilterFns().resolveEffectiveIncludeSeries(topLevel, typeSpecific) +} + +function matchesSeries(seriName: string | readonly string[] | null | undefined, effectiveIncludeSeries: readonly string[]): boolean { + return getSeriesFilterFns().matchesSeries(seriName, effectiveIncludeSeries) +} + +function resolveSubSeries( + topLevel?: Readonly>, + typeSpecific?: Readonly> +): Record { + return getSeriesFilterFns().resolveSubSeries(topLevel, typeSpecific) +} + +/** + * Interface for items that can be filtered by series name + */ +export interface SeriesFilterable { + readonly seriName?: SeriName +} + +/** + * Configuration path types for project config lookup + */ +export type FilterConfigPath = 'commands' | 'skills' | 'subAgents' | 'rules' + +export function filterByProjectConfig( + items: readonly T[], + projectConfig: ProjectConfig | undefined, + configPath: FilterConfigPath +): readonly T[] { + const effectiveSeries = resolveEffectiveIncludeSeries(projectConfig?.includeSeries, projectConfig?.[configPath]?.includeSeries) + return items.filter(item => matchesSeries(item.seriName, effectiveSeries)) +} + +export function normalizeSubdirPath(subdir: string): string { + let normalized = subdir.replaceAll(/\.\/+/g, '') + normalized = normalized.replaceAll(/\/+$/g, '') + return normalized +} + +function smartConcatGlob(prefix: string, glob: string): string { + if (glob.startsWith('**/')) return `${prefix}/${glob}` + if (glob.startsWith('*')) return `${prefix}/**/${glob}` + return `${prefix}/${glob}` +} + +function extractPrefixAndBaseGlob(glob: string, prefixes: readonly string[]): {prefix: string | null, baseGlob: string} { + for (const prefix of prefixes) { + const normalizedPrefix = prefix.replaceAll(/\/+$/g, '') + const patterns = [ + {prefix: normalizedPrefix, pattern: `${normalizedPrefix}/`}, + {prefix: normalizedPrefix, pattern: `${normalizedPrefix}\\`} + ] + for (const {prefix: p, pattern} of patterns) { + if (glob.startsWith(pattern)) return {prefix: p, baseGlob: glob.slice(pattern.length)} + } + if (glob === normalizedPrefix) return {prefix: normalizedPrefix, baseGlob: '**/*'} + } + return {prefix: null, baseGlob: glob} +} + +export function applySubSeriesGlobPrefix(rules: readonly RulePrompt[], projectConfig: ProjectConfig | undefined): readonly RulePrompt[] { + const subSeries = resolveSubSeries(projectConfig?.subSeries, projectConfig?.rules?.subSeries) + if (Object.keys(subSeries).length === 0) return rules + + const normalizedSubSeries: Record = {} + for (const [subdir, seriNames] of Object.entries(subSeries)) { + const normalizedSubdir = normalizeSubdirPath(subdir) + normalizedSubSeries[normalizedSubdir] = seriNames + } + + const allPrefixes = Object.keys(normalizedSubSeries) + + return rules.map(rule => { + if (rule.seriName == null) return rule + + const matchedPrefixes: string[] = [] + for (const [subdir, seriNames] of Object.entries(normalizedSubSeries)) { + const matched = Array.isArray(rule.seriName) ? rule.seriName.some(name => seriNames.includes(name)) : seriNames.includes(rule.seriName) + if (matched) matchedPrefixes.push(subdir) + } + + if (matchedPrefixes.length === 0) return rule + + const newGlobs: string[] = [] + for (const originalGlob of rule.globs) { + const {prefix: existingPrefix, baseGlob} = extractPrefixAndBaseGlob(originalGlob, allPrefixes) + + if (existingPrefix != null) newGlobs.push(originalGlob) + + for (const prefix of matchedPrefixes) { + if (prefix === existingPrefix) continue + const newGlob = smartConcatGlob(prefix, baseGlob) + if (!newGlobs.includes(newGlob)) newGlobs.push(newGlob) + } + } + + return { + ...rule, + globs: newGlobs + } + }) +} + +/** + * Resolves the actual `.git/info` directory for a given project path. + * Handles both regular git repos (`.git` is a directory) and submodules/worktrees (`.git` is a file with `gitdir:` pointer). + * Returns `null` if no valid git info directory can be resolved. + */ +export function resolveGitInfoDir(projectDir: string): string | null { + const dotGitPath = path.join(projectDir, '.git') + + if (!fs.existsSync(dotGitPath)) return null + + const stat = fs.lstatSync(dotGitPath) + + if (stat.isDirectory()) { + const infoDir = path.join(dotGitPath, 'info') + return infoDir + } + + if (stat.isFile()) { + try { + const content = fs.readFileSync(dotGitPath, 'utf8').trim() + const match = /^gitdir: (.+)$/.exec(content) + if (match?.[1] != null) { + const gitdir = path.resolve(projectDir, match[1]) + return path.join(gitdir, 'info') + } + } catch {} // ignore read errors + } + + return null +} + +/** + * Recursively discovers all `.git` entries (directories or files) under a given root, + * skipping common non-source directories. + * Returns absolute paths of directories containing a `.git` entry. + */ +export function findAllGitRepos(rootDir: string, maxDepth = 5): string[] { + const results: string[] = [] + const SKIP_DIRS = new Set(['node_modules', '.turbo', 'dist', 'build', 'out', '.cache']) + + function walk(dir: string, depth: number): void { + if (depth > maxDepth) return + + let entries: fs.Dirent[] + try { + const raw = fs.readdirSync(dir, {withFileTypes: true}) + if (!Array.isArray(raw)) return + entries = raw + } catch { + return + } + + const hasGit = entries.some(e => e.name === '.git') + if (hasGit && dir !== rootDir) results.push(dir) + + for (const entry of entries) { + if (!entry.isDirectory()) continue + if (entry.name === '.git' || SKIP_DIRS.has(entry.name)) continue + walk(path.join(dir, entry.name), depth + 1) + } + } + + walk(rootDir, 0) + return results +} + +/** + * Scans `.git/modules/` directory recursively to find all submodule `info/` dirs. + * Handles nested submodules (modules within modules). + * Returns absolute paths of `info/` directories. + */ +export function findGitModuleInfoDirs(dotGitDir: string): string[] { + const modulesDir = path.join(dotGitDir, 'modules') + if (!fs.existsSync(modulesDir)) return [] + + const results: string[] = [] + + function walk(dir: string): void { + let entries: fs.Dirent[] + try { + const raw = fs.readdirSync(dir, {withFileTypes: true}) + if (!Array.isArray(raw)) return + entries = raw + } catch { + return + } + + const hasInfo = entries.some(e => e.name === 'info' && e.isDirectory()) + if (hasInfo) results.push(path.join(dir, 'info')) + + const nestedModules = entries.find(e => e.name === 'modules' && e.isDirectory()) + if (nestedModules == null) return + + let subEntries: fs.Dirent[] + try { + const raw = fs.readdirSync(path.join(dir, 'modules'), {withFileTypes: true}) + if (!Array.isArray(raw)) return + subEntries = raw + } catch { + return + } + for (const sub of subEntries) { + if (sub.isDirectory()) walk(path.join(dir, 'modules', sub.name)) + } + } + + let topEntries: fs.Dirent[] + try { + const raw = fs.readdirSync(modulesDir, {withFileTypes: true}) + if (!Array.isArray(raw)) return results + topEntries = raw + } catch { + return results + } + + for (const entry of topEntries) { + if (entry.isDirectory()) walk(path.join(modulesDir, entry.name)) + } + + return results +} diff --git a/sdk/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts b/sdk/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts new file mode 100644 index 00000000..5b002ab5 --- /dev/null +++ b/sdk/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts @@ -0,0 +1,182 @@ +import type {ILogger} from '@truenine/logger' +import type {OutputPlugin, OutputWriteContext} from './plugin' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {FilePathKind, PluginKind} from './enums' +import { + collectAllPluginOutputs, + executeDeclarativeWriteOutputs, + validateOutputScopeOverridesForPlugins +} from './plugin' + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createMockWriteContext(pluginName: string, topicOverride: Record): OutputWriteContext { + return { + logger: createMockLogger(), + fs, + path, + glob: {} as never, + dryRun: true, + pluginOptions: { + outputScopes: { + plugins: { + [pluginName]: topicOverride + } + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Relative, + path: '.', + basePath: '.', + getDirectoryName: () => '.', + getAbsolutePath: () => path.resolve('.') + }, + projects: [] + } + } + } as OutputWriteContext +} + +function createMockOutputPlugin(name: string): OutputPlugin { + return { + type: PluginKind.Output, + name, + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: { + commands: { + scopes: ['global'], + singleScope: true + } + }, + async declareOutputFiles() { + return [] + }, + async convertContent() { + return '' + } + } +} + +function createMultiScopeOutputPlugin(name: string): OutputPlugin { + return { + type: PluginKind.Output, + name, + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: { + commands: { + scopes: ['project', 'global'], + singleScope: false + } + }, + async declareOutputFiles() { + return [] + }, + async convertContent() { + return '' + } + } +} + +function createScopedDeclarationPlugin(name: string): OutputPlugin { + return { + type: PluginKind.Output, + name, + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return [ + {path: path.resolve('tmp/project.txt'), scope: 'project', source: {}}, + {path: path.resolve('tmp/global.txt'), scope: 'global', source: {}} + ] + }, + async convertContent() { + return '' + } + } +} + +describe('outputScopes capability validation', () => { + it('accepts valid topic override', async () => { + const plugin = createMockOutputPlugin('MockOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {commands: 'global'}) + + const result = await executeDeclarativeWriteOutputs([plugin], ctx) + expect(result.has(plugin.name)).toBe(true) + }) + + it('throws when override topic is unsupported by plugin capabilities', async () => { + const plugin = createMockOutputPlugin('MockOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {rules: 'global'}) + + await expect(executeDeclarativeWriteOutputs([plugin], ctx)) + .rejects + .toThrow('does not support topic "rules"') + }) + + it('throws when override scope is not allowed by plugin capabilities', async () => { + const plugin = createMockOutputPlugin('MockOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {commands: 'project'}) + + await expect(executeDeclarativeWriteOutputs([plugin], ctx)) + .rejects + .toThrow('requests unsupported scopes [project]') + }) + + it('applies the same validation in output collection path', async () => { + const plugin = createMockOutputPlugin('MockOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {rules: 'global'}) + + await expect(collectAllPluginOutputs([plugin], ctx)) + .rejects + .toThrow('does not support topic "rules"') + }) + + it('throws for multi-scope selection on single-scope topic', () => { + const plugin = createMockOutputPlugin('MockOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {commands: ['global', 'project']}) + + expect(() => validateOutputScopeOverridesForPlugins([plugin], ctx.pluginOptions)) + .toThrow('is single-scope and cannot request multiple scopes') + }) + + it('accepts multi-scope selection when the topic supports parallel scopes', () => { + const plugin = createMultiScopeOutputPlugin('MultiScopeOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {commands: ['project', 'global']}) + + expect(() => validateOutputScopeOverridesForPlugins([plugin], ctx.pluginOptions)).not.toThrow() + }) + + it('rejects workspace as an unsupported override scope', () => { + const plugin = createMultiScopeOutputPlugin('MultiScopeOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {commands: 'workspace'}) + + expect(() => validateOutputScopeOverridesForPlugins([plugin], ctx.pluginOptions)) + .toThrow('requests unsupported scopes [workspace]') + }) + + it('classifies project and global declarations during output collection', async () => { + const plugin = createScopedDeclarationPlugin('ScopedDeclarationPlugin') + const ctx = createMockWriteContext(plugin.name, {}) + + const outputs = await collectAllPluginOutputs([plugin], ctx) + + expect(outputs.projectFiles).toEqual([path.resolve('tmp/project.txt')]) + expect(outputs.globalFiles).toEqual([path.resolve('tmp/global.txt')]) + }) +}) diff --git a/sdk/src/plugins/plugin-core/plugin.ts b/sdk/src/plugins/plugin-core/plugin.ts new file mode 100644 index 00000000..9bed4e00 --- /dev/null +++ b/sdk/src/plugins/plugin-core/plugin.ts @@ -0,0 +1,541 @@ +import type {ILogger} from '@truenine/logger' +import type {MdxGlobalScope} from '@truenine/md-compiler/globals' +import type { + AindexConfig, + CleanupProtectionOptions, + CommandSeriesOptions, + FrontMatterOptions, + OutputScopeOptions, + OutputScopeSelection, + PluginOutputScopeTopics, + ProtectionMode, + WindowsOptions +} from './ConfigTypes.schema' +import type {PluginKind} from './enums' +import type { + InputCollectedContext, + OutputCollectedContext, + Project +} from './InputTypes' +import {Buffer} from 'node:buffer' +import * as fs from 'node:fs' +import * as path from 'node:path' + +export type FastGlobType = typeof import('fast-glob') + +/** + * Opaque type for ScopeRegistry. + * Concrete implementation lives in plugin-input-shared. + */ +export interface ScopeRegistryLike { + resolve: (expression: string) => string +} + +export interface DependencyNode { + readonly name: string + readonly log: ILogger + readonly dependsOn?: readonly string[] +} + +export interface Plugin extends DependencyNode { + readonly type: T +} + +export interface PluginContext { + logger: ILogger + fs: typeof import('node:fs') + path: typeof import('node:path') + glob: FastGlobType +} + +export interface InputCapabilityContext extends PluginContext { + readonly userConfigOptions: Required + readonly dependencyContext: Partial + readonly runtimeCommand?: 'execute' | 'dry-run' | 'clean' | 'plugins' + + readonly globalScope?: MdxGlobalScope + + readonly scopeRegistry?: ScopeRegistryLike +} + +export interface InputCapability extends DependencyNode { + collect: (ctx: InputCapabilityContext) => Partial | Promise> +} + +/** + * Capability that can enhance projects after all projects are collected. + * This is useful for capabilities that need to add data to projects + * collected by earlier capabilities. + */ +export interface ProjectEnhancerCapability extends InputCapability { + enhanceProjects: (ctx: InputCapabilityContext, projects: readonly Project[]) => Project[] +} + +export interface OutputRuntimeTargets { + readonly jetbrainsCodexDirs: readonly string[] +} + +/** + * Context for output plugin operations + */ +export interface OutputPluginContext { + readonly logger: ILogger + readonly collectedOutputContext: OutputCollectedContext + readonly pluginOptions?: PluginOptions + readonly runtimeTargets: OutputRuntimeTargets +} + +/** + * Context for output cleaning operations + */ +export interface OutputCleanContext extends OutputPluginContext { + readonly dryRun?: boolean +} + +/** + * Context for output writing operations + */ +export interface OutputWriteContext extends OutputPluginContext { + readonly dryRun?: boolean + + readonly registeredPluginNames?: readonly string[] +} + +/** + * Declarative host-home file that should be mirrored into configured WSL instances. + */ +export interface WslMirrorFileDeclaration { + /** Source path on the Windows host, typically under ~ */ + readonly sourcePath: string + /** Optional label for diagnostics/logging */ + readonly label?: string +} + +/** + * Result of a single write operation + */ +export interface WriteResult { + readonly path: string + readonly success: boolean + readonly skipped?: boolean + readonly error?: Error +} + +/** + * Collected results from write operations + */ +export interface WriteResults { + readonly files: readonly WriteResult[] + readonly dirs: readonly WriteResult[] +} + +/** + * Awaitable type for sync/async flexibility + */ +export type Awaitable = T | Promise + +/** + * Result of executing an input effect. + * Used for preprocessing/cleaning input sources before collection. + */ +export interface InputEffectResult { + /** Whether the effect executed successfully */ + readonly success: boolean + /** Error details if the effect failed */ + readonly error?: Error + /** Description of what the effect did (for logging) */ + readonly description?: string + /** Files that were modified/created */ + readonly modifiedFiles?: readonly string[] + /** Files that were deleted */ + readonly deletedFiles?: readonly string[] +} + +/** + * Context provided to input effect handlers. + * Contains utilities and configuration for effect execution. + */ +export interface InputEffectContext { + /** Logger instance */ + readonly logger: ILogger + /** File system module */ + readonly fs: typeof import('node:fs') + /** Path module */ + readonly path: typeof import('node:path') + /** Glob module for file matching */ + readonly glob: FastGlobType + /** Child process spawn function */ + readonly spawn: typeof import('node:child_process').spawn + /** User configuration options */ + readonly userConfigOptions: Required + /** Resolved workspace directory */ + readonly workspaceDir: string + /** Resolved aindex directory */ + readonly aindexDir: string + /** Whether running in dry-run mode */ + readonly dryRun?: boolean +} + +/** + * Handler function for input effects. + * Receives the effect context and returns an effect result. + */ +export type InputEffectHandler = (ctx: InputEffectContext) => Awaitable + +/** + * Registration entry for an input effect. + */ +export interface InputEffectRegistration { + /** Descriptive name for logging */ + readonly name: string + /** The effect handler function */ + readonly handler: InputEffectHandler + /** Priority for execution order (lower = earlier, default: 0) */ + readonly priority?: number +} + +/** + * Result of resolving base paths from plugin options. + */ +export interface ResolvedBasePaths { + /** The resolved workspace directory path */ + readonly workspaceDir: string + /** The resolved aindex directory path */ + readonly aindexDir: string +} + +/** + * Represents a registered scope entry from a plugin. + */ +export interface PluginScopeRegistration { + /** The namespace name (e.g., 'myPlugin') */ + readonly namespace: string + /** Key-value pairs registered under this namespace */ + readonly values: Record +} + +/** + * Output plugin interface. + * Declarative write model only: + * - Plugins declare target files + * - Plugins convert source metadata to content + * - Core runtime performs all file system operations + */ +export interface OutputPlugin extends Plugin { + readonly declarativeOutput: true + readonly outputCapabilities: OutputPluginCapabilities + + declareOutputFiles: (ctx: OutputWriteContext) => Awaitable + + convertContent: (declaration: OutputFileDeclaration, ctx: OutputWriteContext) => Awaitable + + declareCleanupPaths?: (ctx: OutputCleanContext) => Awaitable + + declareWslMirrorFiles?: (ctx: OutputWriteContext) => Awaitable +} + +/** + * Scope of a declared output file target. + */ +export type OutputDeclarationScope = 'project' | 'global' + +/** + * Supported output scope override topics. + */ +export const OUTPUT_SCOPE_TOPICS = ['prompt', 'rules', 'commands', 'subagents', 'skills', 'mcp'] as const + +/** + * Topic key for output scope override and capability declarations. + */ +export type OutputScopeTopic = (typeof OUTPUT_SCOPE_TOPICS)[number] + +/** + * Capability declaration for one output topic. + * - scopes: allowed source scopes for selection/override + * - singleScope: whether the topic resolves to a single scope by priority + */ +export interface OutputTopicCapability { + readonly scopes: readonly OutputDeclarationScope[] + readonly singleScope: boolean +} + +/** + * Per-plugin capability matrix for output topics. + */ +export type OutputPluginCapabilities = Partial> + +/** + * Declarative output file declaration. + * Output plugins only declare target paths and source metadata. + * Core runtime performs all file system write operations. + */ +export interface OutputFileDeclaration { + /** Absolute target file path */ + readonly path: string + /** Target scope classification for cleanup/routing */ + readonly scope?: OutputDeclarationScope + /** Plugin-defined source descriptor for content conversion */ + readonly source: unknown + /** Optional existing-file policy */ + readonly ifExists?: 'overwrite' | 'skip' | 'error' + /** Optional label for logging */ + readonly label?: string +} + +/** + * Scope of declarative cleanup targets. + */ +export type OutputCleanupScope = OutputDeclarationScope | 'xdgConfig' + +/** + * Kind of cleanup target. + */ +export type OutputCleanupTargetKind = 'file' | 'directory' | 'glob' + +/** + * Declarative cleanup target. + */ +export interface OutputCleanupPathDeclaration { + /** Absolute path or glob pattern */ + readonly path: string + /** Target kind */ + readonly kind: OutputCleanupTargetKind + /** Optional basename exclusions when expanding delete globs */ + readonly excludeBasenames?: readonly string[] + /** Protection mode to apply when used in protect declarations */ + readonly protectionMode?: ProtectionMode + /** Optional scope label for logging/trace */ + readonly scope?: OutputCleanupScope + /** Optional label for diagnostics */ + readonly label?: string +} + +/** + * Optional cleanup declaration set for one output plugin. + */ +export interface OutputCleanupDeclarations { + /** Paths/patterns that should be cleaned */ + readonly delete?: readonly OutputCleanupPathDeclaration[] + /** Paths/patterns that must be protected from cleanup */ + readonly protect?: readonly OutputCleanupPathDeclaration[] + /** Glob ignore patterns when expanding delete/protect globs */ + readonly excludeScanGlobs?: readonly string[] +} + +function isNodeBufferLike(value: unknown): value is Buffer { + return Buffer.isBuffer(value) +} + +function normalizeScopeSelection(selection: OutputScopeSelection): readonly OutputDeclarationScope[] { + if (typeof selection === 'string') return [selection] + + const unique: OutputDeclarationScope[] = [] + for (const scope of selection) { + if (!unique.includes(scope)) unique.push(scope) + } + return unique +} + +function getPluginScopeOverrides( + pluginName: string, + pluginOptions?: PluginOptions +): PluginOutputScopeTopics | undefined { + return pluginOptions?.outputScopes?.plugins?.[pluginName] +} + +export function validateOutputPluginCapabilities(plugin: OutputPlugin): void { + for (const topic of OUTPUT_SCOPE_TOPICS) { + const capability = plugin.outputCapabilities[topic] + if (capability == null) continue + if (capability.scopes.length === 0) throw new Error(`Plugin ${plugin.name} declares empty scopes for topic "${topic}"`) + } +} + +export function validateOutputScopeOverridesForPlugin( + plugin: OutputPlugin, + pluginOptions?: PluginOptions +): void { + const overrides = getPluginScopeOverrides(plugin.name, pluginOptions) + if (overrides == null) return + + for (const topic of OUTPUT_SCOPE_TOPICS) { + const requestedSelection = overrides[topic] + if (requestedSelection == null) continue + + const capability = plugin.outputCapabilities[topic] + if (capability == null) { + throw new Error( + `Invalid outputScopes configuration: outputScopes.plugins.${plugin.name}.${topic} is set, but plugin ${plugin.name} does not support topic "${topic}".` + ) + } + + const requestedScopes = normalizeScopeSelection(requestedSelection) + if (capability.singleScope && requestedScopes.length > 1) { + const requested = requestedScopes.join(', ') + throw new Error( + `Invalid outputScopes configuration: outputScopes.plugins.${plugin.name}.${topic} is single-scope and cannot request multiple scopes [${requested}].` + ) + } + + const allowedScopes = new Set(capability.scopes) + const unsupportedScopes = requestedScopes.filter(scope => !allowedScopes.has(scope)) + + if (unsupportedScopes.length > 0) { + const allowed = capability.scopes.join(', ') + const requested = unsupportedScopes.join(', ') + throw new Error( + `Invalid outputScopes configuration: outputScopes.plugins.${plugin.name}.${topic} requests unsupported scopes [${requested}]. Allowed scopes: [${allowed}].` + ) + } + } +} + +export function validateOutputScopeOverridesForPlugins( + plugins: readonly OutputPlugin[], + pluginOptions?: PluginOptions +): void { + for (const plugin of plugins) { + validateOutputPluginCapabilities(plugin) + validateOutputScopeOverridesForPlugin(plugin, pluginOptions) + } +} + +export async function collectOutputDeclarations( + plugins: readonly OutputPlugin[], + ctx: OutputWriteContext +): Promise> { + validateOutputScopeOverridesForPlugins(plugins, ctx.pluginOptions) + + const declarationEntries = await Promise.all( + plugins.map(async plugin => [plugin, await plugin.declareOutputFiles(ctx)] as const) + ) + + return new Map(declarationEntries) +} + +/** + * Execute declarative write operations for output plugins. + * Core runtime owns file system writes; plugins only declare and convert content. + */ +export async function executeDeclarativeWriteOutputs( + plugins: readonly OutputPlugin[], + ctx: OutputWriteContext, + predeclaredOutputs?: ReadonlyMap +): Promise> { + const results = new Map() + const outputDeclarations = predeclaredOutputs ?? await collectOutputDeclarations(plugins, ctx) + + for (const plugin of plugins) { + const declarations = outputDeclarations.get(plugin) ?? [] + const fileResults: WriteResult[] = [] + + for (const declaration of declarations) { + if (ctx.dryRun === true) { + fileResults.push({path: declaration.path, success: true, skipped: false}) + continue + } + + try { + const parentDir = path.dirname(declaration.path) + fs.mkdirSync(parentDir, {recursive: true}) + + if (declaration.ifExists === 'skip' && fs.existsSync(declaration.path)) { + fileResults.push({path: declaration.path, success: true, skipped: true}) + continue + } + + if (declaration.ifExists === 'error' && fs.existsSync(declaration.path)) throw new Error(`Refusing to overwrite existing file: ${declaration.path}`) + + const content = await plugin.convertContent(declaration, ctx) + isNodeBufferLike(content) + ? fs.writeFileSync(declaration.path, content) + : fs.writeFileSync(declaration.path, content, 'utf8') + fileResults.push({path: declaration.path, success: true}) + } + catch (error) { + fileResults.push({path: declaration.path, success: false, error: error as Error}) + } + } + + const pluginResult: WriteResults = {files: fileResults, dirs: []} + results.set(plugin.name, pluginResult) + } + + return results +} + +/** + * Collected outputs from all plugins. + * Used by the clean command to gather all artifacts for cleanup. + */ +export interface CollectedOutputs { + readonly projectDirs: readonly string[] + readonly projectFiles: readonly string[] + readonly globalDirs: readonly string[] + readonly globalFiles: readonly string[] +} + +/** + * Collect all outputs from all registered output plugins. + * This is the main entry point for the clean command. + */ +export async function collectAllPluginOutputs( + plugins: readonly OutputPlugin[], + ctx: OutputPluginContext, + predeclaredOutputs?: ReadonlyMap +): Promise { + const projectDirs: string[] = [] + const projectFiles: string[] = [] + const globalDirs: string[] = [] + const globalFiles: string[] = [] + + const declarationGroups = predeclaredOutputs != null + ? [...predeclaredOutputs.values()] + : Array.from( + await collectOutputDeclarations(plugins, {...ctx, dryRun: true}), + ([, declarations]) => declarations + ) + + for (const declarations of declarationGroups) { + for (const declaration of declarations) { + if (declaration.scope === 'global') globalFiles.push(declaration.path) + else projectFiles.push(declaration.path) + } + } + + return { + projectDirs, + projectFiles, + globalDirs, + globalFiles + } +} + +/** + * Configuration to be processed by plugin.config.ts + * Interpreted by plugin system as collection context + * Path placeholder `~` resolves to the user home directory. + * + * @see InputCollectedContext - Input-side collected context + * @see OutputCollectedContext - Output-side collected context + */ +export interface PluginOptions { + readonly version?: string + + readonly workspaceDir?: string + + readonly aindex?: AindexConfig + + readonly commandSeriesOptions?: CommandSeriesOptions + + readonly outputScopes?: OutputScopeOptions + + readonly frontMatter?: FrontMatterOptions + + readonly cleanupProtection?: CleanupProtectionOptions + + readonly windows?: WindowsOptions + + plugins?: readonly (InputCapability | OutputPlugin)[] + logLevel?: 'trace' | 'debug' | 'info' | 'warn' | 'error' +} diff --git a/sdk/src/plugins/plugin-core/scopePolicy.test.ts b/sdk/src/plugins/plugin-core/scopePolicy.test.ts new file mode 100644 index 00000000..34b83aa9 --- /dev/null +++ b/sdk/src/plugins/plugin-core/scopePolicy.test.ts @@ -0,0 +1,50 @@ +import {describe, expect, it} from 'vitest' +import {resolveTopicScopes} from './scopePolicy' + +describe('resolveTopicScopes', () => { + it('selects highest available scope for single-scope topics', () => { + const result = resolveTopicScopes({ + defaultScopes: ['project', 'global'], + supportedScopes: ['project', 'global'], + singleScope: true, + availableScopes: ['project', 'global'] + }) + + expect(result).toEqual(['project']) + }) + + it('respects requested scope when provided', () => { + const result = resolveTopicScopes({ + requestedScopes: ['global'], + defaultScopes: ['project', 'global'], + supportedScopes: ['project', 'global'], + singleScope: true, + availableScopes: ['project', 'global'] + }) + + expect(result).toEqual(['global']) + }) + + it('returns prioritized multi-scope list for multi-scope topics', () => { + const result = resolveTopicScopes({ + requestedScopes: ['global', 'project'], + defaultScopes: ['project', 'global'], + supportedScopes: ['project', 'global'], + singleScope: false + }) + + expect(result).toEqual(['project', 'global']) + }) + + it('returns empty when requested scope is unsupported', () => { + const result = resolveTopicScopes({ + requestedScopes: ['project'], + defaultScopes: ['project'], + supportedScopes: ['global'], + singleScope: true, + availableScopes: ['project', 'global'] + }) + + expect(result).toEqual([]) + }) +}) diff --git a/sdk/src/plugins/plugin-core/scopePolicy.ts b/sdk/src/plugins/plugin-core/scopePolicy.ts new file mode 100644 index 00000000..ca31a6c4 --- /dev/null +++ b/sdk/src/plugins/plugin-core/scopePolicy.ts @@ -0,0 +1,73 @@ +import type {OutputDeclarationScope} from './plugin' + +export const DEFAULT_SCOPE_PRIORITY: readonly OutputDeclarationScope[] = ['project', 'global'] as const + +export type ScopeSelectionInput = OutputDeclarationScope | readonly OutputDeclarationScope[] | undefined + +function normalizeSelection(selection: ScopeSelectionInput): OutputDeclarationScope[] { + if (selection == null) return [] + if (typeof selection === 'string') return [selection] + const unique: OutputDeclarationScope[] = [] + for (const scope of selection) { + if (!unique.includes(scope)) unique.push(scope) + } + return unique +} + +function sortByPriority( + scopes: readonly OutputDeclarationScope[], + priority: readonly OutputDeclarationScope[] +): OutputDeclarationScope[] { + const priorityIndex = new Map() + for (const [index, scope] of priority.entries()) priorityIndex.set(scope, index) + + return [...scopes].sort((a, b) => { + const ia = priorityIndex.get(a) ?? Number.MAX_SAFE_INTEGER + const ib = priorityIndex.get(b) ?? Number.MAX_SAFE_INTEGER + return ia - ib + }) +} + +export interface ResolveTopicScopesOptions { + readonly requestedScopes?: ScopeSelectionInput + readonly defaultScopes: readonly OutputDeclarationScope[] + readonly supportedScopes: readonly OutputDeclarationScope[] + readonly singleScope: boolean + readonly availableScopes?: readonly OutputDeclarationScope[] + readonly priority?: readonly OutputDeclarationScope[] +} + +export function resolveTopicScopes( + options: ResolveTopicScopesOptions +): readonly OutputDeclarationScope[] { + const { + requestedScopes, + defaultScopes, + supportedScopes, + singleScope, + availableScopes, + priority = DEFAULT_SCOPE_PRIORITY + } = options + + const requested = normalizeSelection(requestedScopes) + const defaults = normalizeSelection(defaultScopes) + const supported = new Set(normalizeSelection(supportedScopes)) + + const base = requested.length > 0 ? requested : defaults + const candidates = base.filter(scope => supported.has(scope)) + if (candidates.length === 0) return [] + + const prioritized = sortByPriority(candidates, priority) + + if (!singleScope) return prioritized + + if (availableScopes != null && availableScopes.length > 0) { + const available = new Set(availableScopes) + const matched = prioritized.find(scope => available.has(scope)) + if (matched == null) return [] + return [matched] + } + const [first] = prioritized + if (first == null) return [] + return [first] +} diff --git a/sdk/src/plugins/plugin-core/types.ts b/sdk/src/plugins/plugin-core/types.ts new file mode 100644 index 00000000..3a766dfe --- /dev/null +++ b/sdk/src/plugins/plugin-core/types.ts @@ -0,0 +1,39 @@ +export * from './AindexConfigDefaults' +export * from './AindexTypes' +export * from './ConfigTypes.schema' +export * from './enums' +export * from './ExportMetadataTypes' +export * from './InputTypes' +export * from './OutputTypes' +export * from './plugin' +export * from './PromptTypes' +export type { + DiagnosticLines, + ILogger, + LoggerDiagnosticInput, + LoggerDiagnosticRecord, + LogLevel +} from '@truenine/logger' + +export class MissingDependencyError extends Error { + readonly nodeName: string + + readonly missingDependency: string + + constructor(nodeName: string, missingDependency: string) { + super(`Node "${nodeName}" depends on missing dependency "${missingDependency}"`) + this.name = 'MissingDependencyError' + this.nodeName = nodeName + this.missingDependency = missingDependency + } +} + +export class CircularDependencyError extends Error { + readonly cyclePath: readonly string[] + + constructor(cyclePath: readonly string[]) { + super(`Circular dependency detected: ${cyclePath.join(' -> ')}`) + this.name = 'CircularDependencyError' + this.cyclePath = [...cyclePath] + } +} diff --git a/sdk/src/plugins/plugin-cursor.ts b/sdk/src/plugins/plugin-cursor.ts new file mode 100644 index 00000000..4c94c1bb --- /dev/null +++ b/sdk/src/plugins/plugin-cursor.ts @@ -0,0 +1,3 @@ +export { + CursorOutputPlugin +} from './CursorOutputPlugin' diff --git a/sdk/src/plugins/plugin-droid-cli.ts b/sdk/src/plugins/plugin-droid-cli.ts new file mode 100644 index 00000000..040d09e7 --- /dev/null +++ b/sdk/src/plugins/plugin-droid-cli.ts @@ -0,0 +1,3 @@ +export { + DroidCLIOutputPlugin +} from './DroidCLIOutputPlugin' diff --git a/sdk/src/plugins/plugin-editorconfig.ts b/sdk/src/plugins/plugin-editorconfig.ts new file mode 100644 index 00000000..189999e5 --- /dev/null +++ b/sdk/src/plugins/plugin-editorconfig.ts @@ -0,0 +1,3 @@ +export { + EditorConfigOutputPlugin +} from './EditorConfigOutputPlugin' diff --git a/sdk/src/plugins/plugin-gemini-cli.ts b/sdk/src/plugins/plugin-gemini-cli.ts new file mode 100644 index 00000000..4a330a0d --- /dev/null +++ b/sdk/src/plugins/plugin-gemini-cli.ts @@ -0,0 +1,3 @@ +export { + GeminiCLIOutputPlugin +} from './GeminiCLIOutputPlugin' diff --git a/sdk/src/plugins/plugin-git-exclude.ts b/sdk/src/plugins/plugin-git-exclude.ts new file mode 100644 index 00000000..b4de77a1 --- /dev/null +++ b/sdk/src/plugins/plugin-git-exclude.ts @@ -0,0 +1,3 @@ +export { + GitExcludeOutputPlugin +} from './GitExcludeOutputPlugin' diff --git a/sdk/src/plugins/plugin-jetbrains-ai-codex.ts b/sdk/src/plugins/plugin-jetbrains-ai-codex.ts new file mode 100644 index 00000000..0a3c6461 --- /dev/null +++ b/sdk/src/plugins/plugin-jetbrains-ai-codex.ts @@ -0,0 +1,3 @@ +export { + JetBrainsAIAssistantCodexOutputPlugin +} from './JetBrainsAIAssistantCodexOutputPlugin' diff --git a/sdk/src/plugins/plugin-jetbrains-codestyle.ts b/sdk/src/plugins/plugin-jetbrains-codestyle.ts new file mode 100644 index 00000000..768102b3 --- /dev/null +++ b/sdk/src/plugins/plugin-jetbrains-codestyle.ts @@ -0,0 +1,3 @@ +export { + JetBrainsIDECodeStyleConfigOutputPlugin +} from './JetBrainsIDECodeStyleConfigOutputPlugin' diff --git a/sdk/src/plugins/plugin-openai-codex-cli.ts b/sdk/src/plugins/plugin-openai-codex-cli.ts new file mode 100644 index 00000000..f1affd58 --- /dev/null +++ b/sdk/src/plugins/plugin-openai-codex-cli.ts @@ -0,0 +1,3 @@ +export { + CodexCLIOutputPlugin +} from './CodexCLIOutputPlugin' diff --git a/sdk/src/plugins/plugin-opencode-cli.ts b/sdk/src/plugins/plugin-opencode-cli.ts new file mode 100644 index 00000000..7ce39288 --- /dev/null +++ b/sdk/src/plugins/plugin-opencode-cli.ts @@ -0,0 +1,3 @@ +export { + OpencodeCLIOutputPlugin +} from './OpencodeCLIOutputPlugin' diff --git a/sdk/src/plugins/plugin-qoder-ide.ts b/sdk/src/plugins/plugin-qoder-ide.ts new file mode 100644 index 00000000..4573a43c --- /dev/null +++ b/sdk/src/plugins/plugin-qoder-ide.ts @@ -0,0 +1,3 @@ +export { + QoderIDEPluginOutputPlugin +} from './QoderIDEPluginOutputPlugin' diff --git a/sdk/src/plugins/plugin-readme.ts b/sdk/src/plugins/plugin-readme.ts new file mode 100644 index 00000000..e299d8c0 --- /dev/null +++ b/sdk/src/plugins/plugin-readme.ts @@ -0,0 +1,3 @@ +export { + ReadmeMdConfigFileOutputPlugin +} from './ReadmeMdConfigFileOutputPlugin' diff --git a/sdk/src/plugins/plugin-trae-cn-ide.ts b/sdk/src/plugins/plugin-trae-cn-ide.ts new file mode 100644 index 00000000..c064a45f --- /dev/null +++ b/sdk/src/plugins/plugin-trae-cn-ide.ts @@ -0,0 +1,3 @@ +export { + TraeCNIDEOutputPlugin +} from './TraeCNIDEOutputPlugin' diff --git a/sdk/src/plugins/plugin-trae-ide.ts b/sdk/src/plugins/plugin-trae-ide.ts new file mode 100644 index 00000000..d194f82b --- /dev/null +++ b/sdk/src/plugins/plugin-trae-ide.ts @@ -0,0 +1,3 @@ +export { + TraeIDEOutputPlugin +} from './TraeIDEOutputPlugin' diff --git a/sdk/src/plugins/plugin-vscode.ts b/sdk/src/plugins/plugin-vscode.ts new file mode 100644 index 00000000..c8848542 --- /dev/null +++ b/sdk/src/plugins/plugin-vscode.ts @@ -0,0 +1,3 @@ +export { + VisualStudioCodeIDEConfigOutputPlugin +} from './VisualStudioCodeIDEConfigOutputPlugin' diff --git a/sdk/src/plugins/plugin-warp-ide.ts b/sdk/src/plugins/plugin-warp-ide.ts new file mode 100644 index 00000000..b9e1bf10 --- /dev/null +++ b/sdk/src/plugins/plugin-warp-ide.ts @@ -0,0 +1,3 @@ +export { + WarpIDEOutputPlugin +} from './WarpIDEOutputPlugin' diff --git a/sdk/src/plugins/plugin-windsurf.ts b/sdk/src/plugins/plugin-windsurf.ts new file mode 100644 index 00000000..e749bd3d --- /dev/null +++ b/sdk/src/plugins/plugin-windsurf.ts @@ -0,0 +1,3 @@ +export { + WindsurfOutputPlugin +} from './WindsurfOutputPlugin' diff --git a/sdk/src/plugins/plugin-zed.ts b/sdk/src/plugins/plugin-zed.ts new file mode 100644 index 00000000..85ad398b --- /dev/null +++ b/sdk/src/plugins/plugin-zed.ts @@ -0,0 +1,3 @@ +export { + ZedIDEConfigOutputPlugin +} from './ZedIDEConfigOutputPlugin' diff --git a/sdk/src/prompts.test.ts b/sdk/src/prompts.test.ts new file mode 100644 index 00000000..add5c693 --- /dev/null +++ b/sdk/src/prompts.test.ts @@ -0,0 +1,367 @@ +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {afterEach, describe, expect, it} from 'vitest' +import { + getPrompt, + listPrompts, + resolvePromptDefinition, + upsertPromptSource, + writePromptArtifacts +} from './prompts' + +const tempDirs: string[] = [] + +function createTempWorkspace(prefix: string): string { + const dir = fs.mkdtempSync(path.join(os.tmpdir(), prefix)) + tempDirs.push(dir) + return dir +} + +function writeFile(filePath: string, content: string, modifiedAt: Date): void { + fs.mkdirSync(path.dirname(filePath), {recursive: true}) + fs.writeFileSync(filePath, content, 'utf8') + fs.utimesSync(filePath, modifiedAt, modifiedAt) +} + +function serviceOptions(workspaceDir: string) { + return { + loadUserConfig: false, + pluginOptions: { + workspaceDir + } + } as const +} + +afterEach(() => { + for (const dir of tempDirs.splice(0)) fs.rmSync(dir, {recursive: true, force: true}) +}) + +describe('prompt catalog service', () => { + it('lists every managed prompt family with status metadata', async () => { + const workspaceDir = createTempWorkspace('tnmsc-prompts-') + const aindexDir = path.join(workspaceDir, 'aindex') + const now = Date.now() + + writeFile( + path.join(aindexDir, 'global.src.mdx'), + '---\ndescription: global zh\n---\nGlobal zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'global.mdx'), + '---\ndescription: global en\n---\nGlobal en', + new Date(now - 10_000) + ) + writeFile( + path.join(aindexDir, 'dist', 'global.mdx'), + '---\ndescription: global dist\n---\nGlobal dist', + new Date(now - 10_000) + ) + + writeFile( + path.join(aindexDir, 'workspace.src.mdx'), + '---\ndescription: workspace zh\n---\nWorkspace zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'workspace.mdx'), + '---\ndescription: workspace en\n---\nWorkspace en', + new Date(now + 1_000) + ) + writeFile( + path.join(aindexDir, 'dist', 'workspace.mdx'), + '---\ndescription: workspace dist\n---\nWorkspace dist', + new Date(now + 1_000) + ) + + writeFile( + path.join(aindexDir, 'app', 'project-a', 'agt.src.mdx'), + '---\ndescription: project zh\n---\nProject zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'app', 'project-a', 'agt.mdx'), + '---\ndescription: project en\n---\nProject en', + new Date(now + 1_000) + ) + writeFile( + path.join(aindexDir, 'dist', 'app', 'project-a', 'agt.mdx'), + '---\ndescription: project dist\n---\nProject dist', + new Date(now + 1_000) + ) + + writeFile( + path.join(aindexDir, 'app', 'project-b', 'docs', 'agt.mdx'), + '---\ndescription: child legacy zh\n---\nChild legacy zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'dist', 'app', 'project-b', 'docs', 'agt.mdx'), + '---\ndescription: child dist\n---\nChild dist', + new Date(now + 1_000) + ) + + writeFile( + path.join(aindexDir, 'ext', 'project-a', 'agt.src.mdx'), + '---\ndescription: ext project zh\n---\nExt project zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'dist', 'ext', 'project-a', 'agt.mdx'), + '---\ndescription: ext project dist\n---\nExt project dist', + new Date(now + 1_000) + ) + + writeFile( + path.join(aindexDir, 'arch', 'system-a', 'agt.src.mdx'), + '---\ndescription: arch project zh\n---\nArch project zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'dist', 'arch', 'system-a', 'agt.mdx'), + '---\ndescription: arch project dist\n---\nArch project dist', + new Date(now + 1_000) + ) + + writeFile( + path.join(aindexDir, 'softwares', 'tool-a', 'agt.src.mdx'), + '---\ndescription: software project zh\n---\nSoftware project zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'dist', 'softwares', 'tool-a', 'agt.mdx'), + '---\ndescription: software project dist\n---\nSoftware project dist', + new Date(now + 1_000) + ) + + writeFile( + path.join(aindexDir, 'skills', 'reviewer', 'skill.src.mdx'), + '---\ndescription: skill zh\n---\nSkill zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'skills', 'reviewer', 'skill.mdx'), + '---\ndescription: skill en\n---\nSkill en', + new Date(now + 1_000) + ) + writeFile( + path.join(aindexDir, 'skills', 'reviewer', 'guide.src.mdx'), + '---\ndescription: guide zh\n---\nGuide zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'dist', 'skills', 'reviewer', 'skill.mdx'), + '---\ndescription: skill dist\n---\nSkill dist', + new Date(now + 1_000) + ) + writeFile( + path.join(aindexDir, 'dist', 'skills', 'reviewer', 'guide.mdx'), + '---\ndescription: guide dist\n---\nGuide dist', + new Date(now + 1_000) + ) + + writeFile( + path.join(aindexDir, 'commands', 'dev', 'build.src.mdx'), + '---\ndescription: command zh\n---\nCommand zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'dist', 'commands', 'dev', 'build.mdx'), + '---\ndescription: command dist\n---\nCommand dist', + new Date(now + 1_000) + ) + + writeFile( + path.join(aindexDir, 'subagents', 'qa', 'boot.src.mdx'), + '---\nname: boot\ndescription: subagent zh\n---\nSubagent zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'subagents', 'qa', 'boot.mdx'), + '---\nname: boot\ndescription: subagent en\n---\nSubagent en', + new Date(now + 1_000) + ) + writeFile( + path.join(aindexDir, 'dist', 'subagents', 'qa', 'boot.mdx'), + '---\nname: boot\ndescription: subagent dist\n---\nSubagent dist', + new Date(now + 1_000) + ) + + writeFile( + path.join(aindexDir, 'rules', 'frontend.src.mdx'), + '---\ndescription: rule zh\nglobs: ["src/**"]\n---\nRule zh', + new Date(now) + ) + writeFile( + path.join(aindexDir, 'dist', 'rules', 'frontend.mdx'), + '---\ndescription: rule dist\nglobs: ["src/**"]\n---\nRule dist', + new Date(now + 1_000) + ) + + const prompts = await listPrompts(serviceOptions(workspaceDir)) + + expect(prompts.map(prompt => prompt.promptId)).toEqual([ + 'command:dev/build', + 'global-memory', + 'project-child-memory:app/project-b/docs', + 'project-memory:app/project-a', + 'project-memory:arch/system-a', + 'project-memory:ext/project-a', + 'project-memory:softwares/tool-a', + 'rule:frontend', + 'skill-child-doc:reviewer/guide', + 'skill:reviewer', + 'subagent:qa/boot', + 'workspace-memory' + ]) + expect(prompts.find(prompt => prompt.promptId === 'global-memory')).toEqual(expect.objectContaining({enStatus: 'stale', distStatus: 'stale'})) + expect(prompts.find(prompt => prompt.promptId === 'workspace-memory')).toEqual(expect.objectContaining({enStatus: 'ready', distStatus: 'ready'})) + expect(prompts.find(prompt => prompt.promptId === 'project-child-memory:app/project-b/docs')).toEqual(expect.objectContaining({ + legacyZhSource: true, + enStatus: 'missing', + distStatus: 'ready' + })) + expect(prompts.find(prompt => prompt.promptId === 'project-memory:ext/project-a')).toEqual(expect.objectContaining({ + logicalName: 'ext/project-a', + distStatus: 'ready' + })) + expect(prompts.find(prompt => prompt.promptId === 'command:dev/build')).toEqual(expect.objectContaining({enStatus: 'missing', distStatus: 'ready'})) + + const filtered = await listPrompts({ + ...serviceOptions(workspaceDir), + kinds: ['project-memory'], + distStatus: ['ready'] + }) + + expect(filtered.map(prompt => prompt.promptId)).toEqual([ + 'project-memory:app/project-a', + 'project-memory:arch/system-a', + 'project-memory:ext/project-a', + 'project-memory:softwares/tool-a' + ]) + }) + + it('returns prompt contents and expected paths', async () => { + const workspaceDir = createTempWorkspace('tnmsc-prompt-details-') + const aindexDir = path.join(workspaceDir, 'aindex') + const modifiedAt = new Date() + + writeFile( + path.join(aindexDir, 'skills', 'reviewer', 'skill.src.mdx'), + '---\ndescription: skill zh\n---\nSkill zh', + modifiedAt + ) + writeFile( + path.join(aindexDir, 'skills', 'reviewer', 'skill.mdx'), + '---\ndescription: skill en\n---\nSkill en', + modifiedAt + ) + writeFile( + path.join(aindexDir, 'dist', 'skills', 'reviewer', 'skill.mdx'), + '---\ndescription: skill dist\n---\nSkill dist', + modifiedAt + ) + + const prompt = await getPrompt('skill:reviewer', serviceOptions(workspaceDir)) + const resolvedPaths = await resolvePromptDefinition('skill:reviewer', serviceOptions(workspaceDir)) + + expect(prompt).toEqual(expect.objectContaining({ + promptId: 'skill:reviewer', + frontMatter: expect.objectContaining({description: 'skill zh'}) + })) + expect(prompt?.src.zh?.content).toContain('Skill zh') + expect(prompt?.src.en?.content).toContain('Skill en') + expect(prompt?.dist?.content).toContain('Skill dist') + expect(resolvedPaths).toEqual(prompt?.paths) + }) + + it('migrates legacy project memory to the new zh/en source convention', async () => { + const workspaceDir = createTempWorkspace('tnmsc-project-migration-') + const aindexDir = path.join(workspaceDir, 'aindex') + const legacyPath = path.join(aindexDir, 'app', 'project-c', 'agt.mdx') + + writeFile( + legacyPath, + '---\ndescription: legacy zh\n---\nLegacy zh', + new Date() + ) + + const migrated = await upsertPromptSource({ + ...serviceOptions(workspaceDir), + promptId: 'project-memory:project-c', + locale: 'en', + content: '---\ndescription: translated en\n---\nTranslated en' + }) + + expect(fs.readFileSync(path.join(aindexDir, 'app', 'project-c', 'agt.src.mdx'), 'utf8')).toContain('Legacy zh') + expect(fs.readFileSync(legacyPath, 'utf8')).toContain('Translated en') + expect(migrated.promptId).toBe('project-memory:app/project-c') + expect(migrated.src.zh?.legacySource).toBeUndefined() + expect(migrated.src.en?.content).toContain('Translated en') + + const rewritten = await upsertPromptSource({ + ...serviceOptions(workspaceDir), + promptId: 'project-memory:project-c', + locale: 'zh', + content: '---\ndescription: rewritten zh\n---\nRewritten zh' + }) + + expect(fs.readFileSync(path.join(aindexDir, 'app', 'project-c', 'agt.src.mdx'), 'utf8')).toContain('Rewritten zh') + expect(fs.existsSync(legacyPath)).toBe(false) + expect(rewritten.exists.en).toBe(false) + }) + + it('accepts legacy app project IDs while resolving to series-aware paths', async () => { + const workspaceDir = createTempWorkspace('tnmsc-project-legacy-id-') + const aindexDir = path.join(workspaceDir, 'aindex') + const modifiedAt = new Date() + + writeFile( + path.join(aindexDir, 'app', 'project-a', 'agt.src.mdx'), + '---\ndescription: project zh\n---\nProject zh', + modifiedAt + ) + writeFile( + path.join(aindexDir, 'dist', 'app', 'project-a', 'agt.mdx'), + '---\ndescription: project dist\n---\nProject dist', + modifiedAt + ) + + const prompt = await getPrompt('project-memory:project-a', serviceOptions(workspaceDir)) + const resolvedPaths = await resolvePromptDefinition('project-memory:project-a', serviceOptions(workspaceDir)) + + expect(prompt?.promptId).toBe('project-memory:app/project-a') + expect(resolvedPaths.zh).toBe(path.join(aindexDir, 'app', 'project-a', 'agt.src.mdx')) + expect(resolvedPaths.dist).toBe(path.join(aindexDir, 'dist', 'app', 'project-a', 'agt.mdx')) + }) + + it('writes translation artifacts independently for en and dist', async () => { + const workspaceDir = createTempWorkspace('tnmsc-translation-write-') + const aindexDir = path.join(workspaceDir, 'aindex') + + writeFile( + path.join(aindexDir, 'commands', 'dev', 'ship.src.mdx'), + '---\ndescription: ship zh\n---\nShip zh', + new Date() + ) + + const afterEnWrite = await writePromptArtifacts({ + ...serviceOptions(workspaceDir), + promptId: 'command:dev/ship', + enContent: '---\ndescription: ship en\n---\nShip en' + }) + + expect(afterEnWrite.src.en?.content).toContain('Ship en') + expect(afterEnWrite.distStatus).toBe('missing') + + const afterDistWrite = await writePromptArtifacts({ + ...serviceOptions(workspaceDir), + promptId: 'command:dev/ship', + distContent: '---\ndescription: ship dist\n---\nShip dist' + }) + + expect(afterDistWrite.dist?.content).toContain('Ship dist') + expect(afterDistWrite.distStatus).toBe('ready') + }) +}) diff --git a/sdk/src/prompts.ts b/sdk/src/prompts.ts new file mode 100644 index 00000000..b04dd9b4 --- /dev/null +++ b/sdk/src/prompts.ts @@ -0,0 +1,804 @@ +import type {AindexProjectSeriesName, PluginOptions, YAMLFrontMatter} from '@/plugins/plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {parseMarkdown} from '@truenine/md-compiler/markdown' +import glob from 'fast-glob' +import { + isAindexProjectSeriesName, + resolveAindexProjectSeriesConfig, + resolveAindexProjectSeriesConfigs +} from '@/aindex-project-series' +import {mergeConfig, userConfigToPluginOptions} from './config' +import {getConfigLoader} from './ConfigLoader' +import {PathPlaceholders} from './plugins/plugin-core' +import {resolveUserPath} from './runtime-environment' + +export type ManagedPromptKind + = | 'global-memory' + | 'workspace-memory' + | 'project-memory' + | 'project-child-memory' + | 'skill' + | 'skill-child-doc' + | 'command' + | 'subagent' + | 'rule' + +export type PromptArtifactState = 'missing' | 'stale' | 'ready' +export type PromptSourceLocale = 'zh' | 'en' + +export interface PromptServiceOptions { + readonly cwd?: string + readonly loadUserConfig?: boolean + readonly pluginOptions?: Partial +} + +export interface ListPromptsOptions extends PromptServiceOptions { + readonly kinds?: readonly ManagedPromptKind[] + readonly query?: string + readonly enStatus?: readonly PromptArtifactState[] + readonly distStatus?: readonly PromptArtifactState[] +} + +export interface PromptArtifactRecord { + readonly path: string + readonly exists: true + readonly mtime: string + readonly mtimeMs: number + readonly size: number + readonly legacySource?: true + readonly frontMatter?: YAMLFrontMatter + readonly content?: string +} + +export interface PromptCatalogPaths { + readonly zh: string + readonly en: string + readonly dist: string +} + +export interface PromptCatalogPresence { + readonly zh: boolean + readonly en: boolean + readonly dist: boolean +} + +export interface PromptCatalogItem { + readonly promptId: string + readonly kind: ManagedPromptKind + readonly logicalName: string + readonly paths: PromptCatalogPaths + readonly exists: PromptCatalogPresence + readonly enStatus: PromptArtifactState + readonly distStatus: PromptArtifactState + readonly updatedAt?: string + readonly legacyZhSource?: true +} + +export interface PromptDetails extends PromptCatalogItem { + readonly src: { + readonly zh?: PromptArtifactRecord + readonly en?: PromptArtifactRecord + } + readonly dist?: PromptArtifactRecord + readonly frontMatter?: YAMLFrontMatter +} + +export interface UpsertPromptSourceInput extends PromptServiceOptions { + readonly promptId: string + readonly locale?: PromptSourceLocale + readonly content: string +} + +export interface WritePromptArtifactsInput extends PromptServiceOptions { + readonly promptId: string + readonly enContent?: string + readonly distContent?: string +} + +interface ResolvedPromptEnvironment { + readonly options: Required + readonly workspaceDir: string + readonly aindexDir: string +} + +interface PromptDefinition { + readonly promptId: string + readonly kind: ManagedPromptKind + readonly logicalName: string + readonly paths: PromptCatalogPaths + readonly legacyZhPath?: string +} + +interface PromptIdDescriptor { + readonly kind: ManagedPromptKind + readonly seriesName?: AindexProjectSeriesName + readonly projectName?: string + readonly relativeName?: string + readonly skillName?: string +} + +const SOURCE_PROMPT_EXTENSION = '.src.mdx' +const MDX_EXTENSION = '.mdx' +const PROJECT_MEMORY_FILE_NAME = 'agt' +const SKILL_ENTRY_FILE_NAME = 'skill' +const LEGACY_PROJECT_MEMORY_KINDS = new Set([ + 'project-memory', + 'project-child-memory' +]) + +function normalizeSlashPath(value: string): string { + return value.replaceAll('\\', '/') +} + +function normalizeRelativeIdentifier(value: string, fieldName: string): string { + const normalized = normalizeSlashPath(value).trim() + if (normalized.length === 0) throw new Error(`${fieldName} cannot be empty`) + + const segments = normalized.split('/') + for (const segment of segments) { + if (segment.length === 0 || segment === '.' || segment === '..') throw new Error(`${fieldName} contains an invalid path segment`) + } + + return segments.join('/') +} + +function isSingleSegmentIdentifier(value: string): boolean { + return !normalizeSlashPath(value).includes('/') +} + +function resolveConfiguredPath(rawPath: string, workspaceDir: string): string { + let resolved = rawPath + + if (resolved.includes(PathPlaceholders.WORKSPACE)) resolved = resolved.replace(PathPlaceholders.WORKSPACE, workspaceDir) + + return resolveUserPath(resolved) +} + +function resolvePromptEnvironment(options: PromptServiceOptions = {}): ResolvedPromptEnvironment { + const {cwd, loadUserConfig = true, pluginOptions = {}} = options + let userConfigOptions: Partial = {} + + if (loadUserConfig) { + const userConfigResult = getConfigLoader().load(cwd) + if (userConfigResult.found) userConfigOptions = userConfigToPluginOptions(userConfigResult.config) + } + + const mergedOptions = mergeConfig(userConfigOptions, pluginOptions) + const workspaceDir = resolveConfiguredPath(mergedOptions.workspaceDir, '') + const aindexDir = path.join(workspaceDir, mergedOptions.aindex.dir) + + return { + options: mergedOptions, + workspaceDir, + aindexDir + } +} + +function deriveEnglishSourcePath(zhPath: string): string { + if (zhPath.endsWith(SOURCE_PROMPT_EXTENSION)) return `${zhPath.slice(0, -SOURCE_PROMPT_EXTENSION.length)}${MDX_EXTENSION}` + + const ext = path.extname(zhPath) + if (ext === MDX_EXTENSION) return zhPath + return `${zhPath}${MDX_EXTENSION}` +} + +function stripPromptExtension(filePath: string): string { + if (filePath.endsWith(SOURCE_PROMPT_EXTENSION)) return filePath.slice(0, -SOURCE_PROMPT_EXTENSION.length) + + if (filePath.endsWith(MDX_EXTENSION)) return filePath.slice(0, -MDX_EXTENSION.length) + + return filePath +} + +function listFiles(cwd: string, patterns: readonly string[]): string[] { + if (!(fs.existsSync(cwd) && fs.statSync(cwd).isDirectory())) return [] + + return glob.sync([...patterns], { + cwd, + dot: true, + onlyFiles: true + }).map(normalizeSlashPath) +} + +function buildGlobalMemoryDefinition(env: ResolvedPromptEnvironment): PromptDefinition { + const zhPath = path.join(env.aindexDir, env.options.aindex.globalPrompt.src) + + return { + promptId: 'global-memory', + kind: 'global-memory', + logicalName: 'global-memory', + paths: { + zh: zhPath, + en: deriveEnglishSourcePath(zhPath), + dist: path.join(env.aindexDir, env.options.aindex.globalPrompt.dist) + } + } +} + +function buildWorkspaceMemoryDefinition(env: ResolvedPromptEnvironment): PromptDefinition { + const zhPath = path.join(env.aindexDir, env.options.aindex.workspacePrompt.src) + + return { + promptId: 'workspace-memory', + kind: 'workspace-memory', + logicalName: 'workspace-memory', + paths: { + zh: zhPath, + en: deriveEnglishSourcePath(zhPath), + dist: path.join(env.aindexDir, env.options.aindex.workspacePrompt.dist) + } + } +} + +function buildProjectMemoryDefinition( + env: ResolvedPromptEnvironment, + seriesName: AindexProjectSeriesName, + projectName: string, + relativeName?: string +): PromptDefinition { + const normalizedProjectName = normalizeRelativeIdentifier(projectName, 'projectName') + if (!isSingleSegmentIdentifier(normalizedProjectName)) throw new Error('projectName must be a single path segment') + + const normalizedRelativeName = relativeName == null + ? '' + : normalizeRelativeIdentifier(relativeName, 'relativeName') + const seriesConfig = resolveAindexProjectSeriesConfig(env.options, seriesName) + const sourceDir = normalizedRelativeName.length === 0 + ? path.join(env.aindexDir, seriesConfig.src, normalizedProjectName) + : path.join(env.aindexDir, seriesConfig.src, normalizedProjectName, normalizedRelativeName) + const distDir = normalizedRelativeName.length === 0 + ? path.join(env.aindexDir, seriesConfig.dist, normalizedProjectName) + : path.join(env.aindexDir, seriesConfig.dist, normalizedProjectName, normalizedRelativeName) + const legacyPath = path.join(sourceDir, `${PROJECT_MEMORY_FILE_NAME}${MDX_EXTENSION}`) + const logicalSuffix = normalizedRelativeName.length === 0 + ? `${seriesName}/${normalizedProjectName}` + : `${seriesName}/${normalizedProjectName}/${normalizedRelativeName}` + + return { + promptId: normalizedRelativeName.length === 0 + ? `project-memory:${logicalSuffix}` + : `project-child-memory:${logicalSuffix}`, + kind: normalizedRelativeName.length === 0 ? 'project-memory' : 'project-child-memory', + logicalName: logicalSuffix, + paths: { + zh: path.join(sourceDir, `${PROJECT_MEMORY_FILE_NAME}${SOURCE_PROMPT_EXTENSION}`), + en: legacyPath, + dist: path.join(distDir, `${PROJECT_MEMORY_FILE_NAME}${MDX_EXTENSION}`) + }, + legacyZhPath: legacyPath + } +} + +function buildSkillDefinition( + env: ResolvedPromptEnvironment, + skillName: string +): PromptDefinition { + const normalizedSkillName = normalizeRelativeIdentifier(skillName, 'skillName') + if (!isSingleSegmentIdentifier(normalizedSkillName)) throw new Error('skillName must be a single path segment') + + const sourceDir = path.join(env.aindexDir, env.options.aindex.skills.src, normalizedSkillName) + const distDir = path.join(env.aindexDir, env.options.aindex.skills.dist, normalizedSkillName) + + return { + promptId: `skill:${normalizedSkillName}`, + kind: 'skill', + logicalName: normalizedSkillName, + paths: { + zh: path.join(sourceDir, `${SKILL_ENTRY_FILE_NAME}${SOURCE_PROMPT_EXTENSION}`), + en: path.join(sourceDir, `${SKILL_ENTRY_FILE_NAME}${MDX_EXTENSION}`), + dist: path.join(distDir, `${SKILL_ENTRY_FILE_NAME}${MDX_EXTENSION}`) + } + } +} + +function buildSkillChildDocDefinition( + env: ResolvedPromptEnvironment, + skillName: string, + relativeName: string +): PromptDefinition { + const normalizedSkillName = normalizeRelativeIdentifier(skillName, 'skillName') + const normalizedRelativeName = normalizeRelativeIdentifier(relativeName, 'relativeName') + if (!isSingleSegmentIdentifier(normalizedSkillName)) throw new Error('skillName must be a single path segment') + + const sourceDir = path.join(env.aindexDir, env.options.aindex.skills.src, normalizedSkillName) + const distDir = path.join(env.aindexDir, env.options.aindex.skills.dist, normalizedSkillName) + + return { + promptId: `skill-child-doc:${normalizedSkillName}/${normalizedRelativeName}`, + kind: 'skill-child-doc', + logicalName: `${normalizedSkillName}/${normalizedRelativeName}`, + paths: { + zh: path.join(sourceDir, `${normalizedRelativeName}${SOURCE_PROMPT_EXTENSION}`), + en: path.join(sourceDir, `${normalizedRelativeName}${MDX_EXTENSION}`), + dist: path.join(distDir, `${normalizedRelativeName}${MDX_EXTENSION}`) + } + } +} + +function buildFlatPromptDefinition( + env: ResolvedPromptEnvironment, + kind: Extract, + relativeName: string +): PromptDefinition { + const normalizedRelativeName = normalizeRelativeIdentifier(relativeName, 'relativeName') + const sourceDir = kind === 'command' + ? path.join(env.aindexDir, env.options.aindex.commands.src) + : kind === 'subagent' + ? path.join(env.aindexDir, env.options.aindex.subAgents.src) + : path.join(env.aindexDir, env.options.aindex.rules.src) + const distDir = kind === 'command' + ? path.join(env.aindexDir, env.options.aindex.commands.dist) + : kind === 'subagent' + ? path.join(env.aindexDir, env.options.aindex.subAgents.dist) + : path.join(env.aindexDir, env.options.aindex.rules.dist) + + return { + promptId: `${kind}:${normalizedRelativeName}`, + kind, + logicalName: normalizedRelativeName, + paths: { + zh: path.join(sourceDir, `${normalizedRelativeName}${SOURCE_PROMPT_EXTENSION}`), + en: path.join(sourceDir, `${normalizedRelativeName}${MDX_EXTENSION}`), + dist: path.join(distDir, `${normalizedRelativeName}${MDX_EXTENSION}`) + } + } +} + +function parsePromptId(promptId: string): PromptIdDescriptor { + switch (promptId) { + case 'global-memory': return {kind: 'global-memory'} + case 'workspace-memory': return {kind: 'workspace-memory'} + default: break + } + + const separatorIndex = promptId.indexOf(':') + if (separatorIndex === -1) throw new Error(`Unsupported promptId: ${promptId}`) + + const kind = promptId.slice(0, separatorIndex) as ManagedPromptKind + const rawValue = promptId.slice(separatorIndex + 1) + const normalizedValue = normalizeRelativeIdentifier(rawValue, 'promptId') + + switch (kind) { + case 'project-memory': + return parseProjectPromptDescriptor(kind, normalizedValue) + case 'project-child-memory': { + return parseProjectPromptDescriptor(kind, normalizedValue) + } + case 'skill': + if (!isSingleSegmentIdentifier(normalizedValue)) throw new Error('skill promptId must include a single skill name') + return {kind, skillName: normalizedValue} + case 'skill-child-doc': { + const [skillName, ...rest] = normalizedValue.split('/') + const relativeName = rest.join('/') + if (skillName == null || relativeName.length === 0) throw new Error('skill-child-doc promptId must include skill and child path') + return {kind, skillName, relativeName} + } + case 'command': + case 'subagent': + case 'rule': return {kind, relativeName: normalizedValue} + default: throw new Error(`Unsupported promptId: ${promptId}`) + } +} + +function parseProjectPromptDescriptor( + kind: Extract, + normalizedValue: string +): PromptIdDescriptor { + const segments = normalizedValue.split('/') + const maybeSeriesName = segments[0] + const hasSeriesName = maybeSeriesName != null && isAindexProjectSeriesName(maybeSeriesName) + + if (kind === 'project-memory') { + if (hasSeriesName) { + const projectName = segments[1] + if (projectName == null || segments.length !== 2) throw new Error('project-memory promptId must include exactly one project name after the series') + return {kind, seriesName: maybeSeriesName, projectName} + } + + if (!isSingleSegmentIdentifier(normalizedValue)) throw new Error('project-memory promptId must include a single project name') + return {kind, seriesName: 'app', projectName: normalizedValue} + } + + if (hasSeriesName) { + const projectName = segments[1] + const relativeName = segments.slice(2).join('/') + if (projectName == null || relativeName.length === 0) throw new Error('project-child-memory promptId must include series, project, and child path') + return {kind, seriesName: maybeSeriesName, projectName, relativeName} + } + + const [projectName, ...rest] = segments + const relativeName = rest.join('/') + if (projectName == null || relativeName.length === 0) throw new Error('project-child-memory promptId must include project and child path') + return {kind, seriesName: 'app', projectName, relativeName} +} + +function buildPromptDefinitionFromId( + promptId: string, + env: ResolvedPromptEnvironment +): PromptDefinition { + const descriptor = parsePromptId(promptId) + + switch (descriptor.kind) { + case 'global-memory': return buildGlobalMemoryDefinition(env) + case 'workspace-memory': return buildWorkspaceMemoryDefinition(env) + case 'project-memory': + if (descriptor.projectName == null) throw new Error('project-memory promptId must include a project name') + return buildProjectMemoryDefinition(env, descriptor.seriesName ?? 'app', descriptor.projectName) + case 'project-child-memory': + if (descriptor.projectName == null || descriptor.relativeName == null) { + throw new Error('project-child-memory promptId must include project and child path') + } + return buildProjectMemoryDefinition(env, descriptor.seriesName ?? 'app', descriptor.projectName, descriptor.relativeName) + case 'skill': + if (descriptor.skillName == null) throw new Error('skill promptId must include a skill name') + return buildSkillDefinition(env, descriptor.skillName) + case 'skill-child-doc': + if (descriptor.skillName == null || descriptor.relativeName == null) { + throw new Error('skill-child-doc promptId must include skill and child path') + } + return buildSkillChildDocDefinition(env, descriptor.skillName, descriptor.relativeName) + case 'command': + case 'subagent': + case 'rule': + if (descriptor.relativeName == null) throw new Error(`${descriptor.kind} promptId must include a relative path`) + return buildFlatPromptDefinition(env, descriptor.kind, descriptor.relativeName) + } +} + +function collectFlatPromptIds( + env: ResolvedPromptEnvironment, + kind: Extract +): string[] { + const sourceDir = kind === 'command' + ? path.join(env.aindexDir, env.options.aindex.commands.src) + : kind === 'subagent' + ? path.join(env.aindexDir, env.options.aindex.subAgents.src) + : path.join(env.aindexDir, env.options.aindex.rules.src) + const distDir = kind === 'command' + ? path.join(env.aindexDir, env.options.aindex.commands.dist) + : kind === 'subagent' + ? path.join(env.aindexDir, env.options.aindex.subAgents.dist) + : path.join(env.aindexDir, env.options.aindex.rules.dist) + const names = new Set() + + for (const match of listFiles(sourceDir, [`**/*${SOURCE_PROMPT_EXTENSION}`, `**/*${MDX_EXTENSION}`])) names.add(stripPromptExtension(match)) + + for (const match of listFiles(distDir, [`**/*${MDX_EXTENSION}`])) names.add(stripPromptExtension(match)) + + return [...names].sort().map(name => `${kind}:${name}`) +} + +function collectSkillPromptIds(env: ResolvedPromptEnvironment): string[] { + const sourceRoot = path.join(env.aindexDir, env.options.aindex.skills.src) + const distRoot = path.join(env.aindexDir, env.options.aindex.skills.dist) + const skillNames = new Set() + + if (fs.existsSync(sourceRoot) && fs.statSync(sourceRoot).isDirectory()) { + for (const entry of fs.readdirSync(sourceRoot, {withFileTypes: true})) { + if (entry.isDirectory()) skillNames.add(entry.name) + } + } + + if (fs.existsSync(distRoot) && fs.statSync(distRoot).isDirectory()) { + for (const entry of fs.readdirSync(distRoot, {withFileTypes: true})) { + if (entry.isDirectory()) skillNames.add(entry.name) + } + } + + const promptIds: string[] = [] + + for (const skillName of [...skillNames].sort()) { + promptIds.push(`skill:${skillName}`) + + const sourceDir = path.join(sourceRoot, skillName) + const distDir = path.join(distRoot, skillName) + const childNames = new Set() + + for (const match of listFiles(sourceDir, [`**/*${SOURCE_PROMPT_EXTENSION}`, `**/*${MDX_EXTENSION}`])) { + const stripped = stripPromptExtension(match) + if (stripped === SKILL_ENTRY_FILE_NAME) continue + childNames.add(stripped) + } + + for (const match of listFiles(distDir, [`**/*${MDX_EXTENSION}`])) { + const stripped = stripPromptExtension(match) + if (stripped === SKILL_ENTRY_FILE_NAME) continue + childNames.add(stripped) + } + + for (const childName of [...childNames].sort()) promptIds.push(`skill-child-doc:${skillName}/${childName}`) + } + + return promptIds +} + +function collectProjectPromptIds(env: ResolvedPromptEnvironment): string[] { + const promptIds: string[] = [] + + for (const series of resolveAindexProjectSeriesConfigs(env.options)) { + const sourceRoot = path.join(env.aindexDir, series.src) + const distRoot = path.join(env.aindexDir, series.dist) + const relativeDirs = new Set() + + for (const match of listFiles(sourceRoot, [`**/${PROJECT_MEMORY_FILE_NAME}${SOURCE_PROMPT_EXTENSION}`, `**/${PROJECT_MEMORY_FILE_NAME}${MDX_EXTENSION}`])) { + const directory = normalizeSlashPath(path.posix.dirname(normalizeSlashPath(match))) + if (directory !== '.') relativeDirs.add(directory) + } + + for (const match of listFiles(distRoot, [`**/${PROJECT_MEMORY_FILE_NAME}${MDX_EXTENSION}`])) { + const directory = normalizeSlashPath(path.posix.dirname(normalizeSlashPath(match))) + if (directory !== '.') relativeDirs.add(directory) + } + + for (const relativeDir of [...relativeDirs].sort()) { + const [projectName, ...rest] = relativeDir.split('/') + const childPath = rest.join('/') + if (projectName == null || projectName.length === 0) continue + + promptIds.push(childPath.length === 0 + ? `project-memory:${series.name}/${projectName}` + : `project-child-memory:${series.name}/${projectName}/${childPath}`) + } + } + + return promptIds +} + +function collectDiscoveredPromptIds(env: ResolvedPromptEnvironment): string[] { + const promptIds = new Set() + const globalDefinition = buildGlobalMemoryDefinition(env) + const workspaceDefinition = buildWorkspaceMemoryDefinition(env) + + if ( + fs.existsSync(globalDefinition.paths.zh) + || fs.existsSync(globalDefinition.paths.en) + || fs.existsSync(globalDefinition.paths.dist) + ) { + promptIds.add(globalDefinition.promptId) + } + + if ( + fs.existsSync(workspaceDefinition.paths.zh) + || fs.existsSync(workspaceDefinition.paths.en) + || fs.existsSync(workspaceDefinition.paths.dist) + ) { + promptIds.add(workspaceDefinition.promptId) + } + + for (const promptId of collectProjectPromptIds(env)) promptIds.add(promptId) + for (const promptId of collectSkillPromptIds(env)) promptIds.add(promptId) + for (const promptId of collectFlatPromptIds(env, 'command')) promptIds.add(promptId) + for (const promptId of collectFlatPromptIds(env, 'subagent')) promptIds.add(promptId) + for (const promptId of collectFlatPromptIds(env, 'rule')) promptIds.add(promptId) + + return [...promptIds].sort() +} + +function parseFrontMatter(content: string): YAMLFrontMatter | undefined { + try { + return parseMarkdown(content).yamlFrontMatter + } + catch { + return void 0 + } +} + +function readArtifact( + filePath: string, + includeContent: boolean, + legacySource: boolean = false +): PromptArtifactRecord | undefined { + if (!(fs.existsSync(filePath) && fs.statSync(filePath).isFile())) return void 0 + + const stat = fs.statSync(filePath) + const rawContent = includeContent ? fs.readFileSync(filePath, 'utf8') : void 0 + + const artifact: PromptArtifactRecord = { + path: filePath, + exists: true, + mtime: stat.mtime.toISOString(), + mtimeMs: stat.mtimeMs, + size: stat.size, + ...legacySource ? {legacySource: true} : {}, + ...rawContent != null ? {content: rawContent} : {} + } + + const frontMatter = rawContent != null ? parseFrontMatter(rawContent) : void 0 + if (frontMatter != null) Object.assign(artifact, {frontMatter}) + + return artifact +} + +function resolveArtifactStatus( + zhArtifact: PromptArtifactRecord | undefined, + targetArtifact: PromptArtifactRecord | undefined +): PromptArtifactState { + if (targetArtifact == null) return 'missing' + if (zhArtifact != null && targetArtifact.mtimeMs < zhArtifact.mtimeMs) return 'stale' + return 'ready' +} + +function hydratePrompt( + definition: PromptDefinition, + includeContent: boolean +): PromptDetails | null { + const hasCanonicalZh = fs.existsSync(definition.paths.zh) + const {legacyZhPath} = definition + const hasLegacyZh = !hasCanonicalZh + && legacyZhPath != null + && fs.existsSync(legacyZhPath) + const zhArtifactPath = hasCanonicalZh + ? definition.paths.zh + : hasLegacyZh + ? legacyZhPath + : void 0 + const zhArtifact = zhArtifactPath != null + ? readArtifact(zhArtifactPath, includeContent, hasLegacyZh) + : void 0 + const enArtifact = hasCanonicalZh || legacyZhPath !== definition.paths.en + ? readArtifact(definition.paths.en, includeContent) + : void 0 + const distArtifact = readArtifact(definition.paths.dist, includeContent) + + if (zhArtifact == null && enArtifact == null && distArtifact == null) return null + + const updatedAt = [zhArtifact, enArtifact, distArtifact] + .filter((artifact): artifact is PromptArtifactRecord => artifact != null) + .sort((a, b) => b.mtimeMs - a.mtimeMs)[0] + ?.mtime + + const prompt: PromptDetails = { + promptId: definition.promptId, + kind: definition.kind, + logicalName: definition.logicalName, + paths: definition.paths, + exists: { + zh: zhArtifact != null, + en: enArtifact != null, + dist: distArtifact != null + }, + enStatus: resolveArtifactStatus(zhArtifact, enArtifact), + distStatus: resolveArtifactStatus(zhArtifact, distArtifact), + ...updatedAt != null ? {updatedAt} : {}, + ...zhArtifact?.legacySource === true ? {legacyZhSource: true} : {}, + src: { + ...zhArtifact != null ? {zh: zhArtifact} : {}, + ...enArtifact != null ? {en: enArtifact} : {} + } + } + + if (distArtifact != null) Object.assign(prompt, {dist: distArtifact}) + + const frontMatter = zhArtifact?.frontMatter ?? enArtifact?.frontMatter ?? distArtifact?.frontMatter + if (frontMatter != null) Object.assign(prompt, {frontMatter}) + + return prompt +} + +function matchesFilter( + value: T, + allowed: readonly T[] | undefined +): boolean { + if (allowed == null || allowed.length === 0) return true + return allowed.includes(value) +} + +function matchesQuery(item: PromptCatalogItem, query: string | undefined): boolean { + if (query == null || query.trim().length === 0) return true + const normalizedQuery = query.trim().toLowerCase() + return item.promptId.toLowerCase().includes(normalizedQuery) + || item.logicalName.toLowerCase().includes(normalizedQuery) +} + +function toCatalogItem(prompt: PromptDetails): PromptCatalogItem { + return { + promptId: prompt.promptId, + kind: prompt.kind, + logicalName: prompt.logicalName, + paths: prompt.paths, + exists: prompt.exists, + enStatus: prompt.enStatus, + distStatus: prompt.distStatus, + ...prompt.updatedAt != null ? {updatedAt: prompt.updatedAt} : {}, + ...prompt.legacyZhSource === true ? {legacyZhSource: true} : {} + } +} + +function isProjectMemoryDefinition(definition: PromptDefinition): boolean { + return LEGACY_PROJECT_MEMORY_KINDS.has(definition.kind) +} + +function writeTextFile(filePath: string, content: string): void { + fs.mkdirSync(path.dirname(filePath), {recursive: true}) + fs.writeFileSync(filePath, content, 'utf8') +} + +function prepareProjectMemoryForEnglishWrite(definition: PromptDefinition): void { + if (!isProjectMemoryDefinition(definition)) return + if (fs.existsSync(definition.paths.zh)) return + if (definition.legacyZhPath == null || !fs.existsSync(definition.legacyZhPath)) return + + const legacyContent = fs.readFileSync(definition.legacyZhPath, 'utf8') + writeTextFile(definition.paths.zh, legacyContent) +} + +function migrateLegacyProjectMemorySourceOnZhWrite(definition: PromptDefinition): void { + if (!isProjectMemoryDefinition(definition)) return + if (definition.legacyZhPath == null || definition.legacyZhPath === definition.paths.zh) return + if (!fs.existsSync(definition.legacyZhPath)) return + + fs.rmSync(definition.legacyZhPath, {force: true}) +} + +export async function listPrompts( + options: ListPromptsOptions = {} +): Promise { + const env = resolvePromptEnvironment(options) + const items = collectDiscoveredPromptIds(env) + .map(promptId => hydratePrompt(buildPromptDefinitionFromId(promptId, env), false)) + .filter((item): item is PromptDetails => item != null) + .map(toCatalogItem) + .filter(item => matchesFilter(item.kind, options.kinds)) + .filter(item => matchesFilter(item.enStatus, options.enStatus)) + .filter(item => matchesFilter(item.distStatus, options.distStatus)) + .filter(item => matchesQuery(item, options.query)) + + return items.sort((a, b) => a.promptId.localeCompare(b.promptId)) +} + +export async function getPrompt( + promptId: string, + options: PromptServiceOptions = {} +): Promise { + const env = resolvePromptEnvironment(options) + return hydratePrompt(buildPromptDefinitionFromId(promptId, env), true) +} + +export async function upsertPromptSource( + input: UpsertPromptSourceInput +): Promise { + const env = resolvePromptEnvironment(input) + const locale = input.locale ?? 'zh' + const definition = buildPromptDefinitionFromId(input.promptId, env) + + if (locale === 'zh') { + writeTextFile(definition.paths.zh, input.content) + migrateLegacyProjectMemorySourceOnZhWrite(definition) + } else { + prepareProjectMemoryForEnglishWrite(definition) + writeTextFile(definition.paths.en, input.content) + } + + const prompt = hydratePrompt(definition, true) + if (prompt == null) throw new Error(`Failed to load prompt after write: ${input.promptId}`) + return prompt +} + +export async function writePromptArtifacts( + input: WritePromptArtifactsInput +): Promise { + if (input.enContent == null && input.distContent == null) throw new Error('writePromptArtifacts requires enContent or distContent') + + const env = resolvePromptEnvironment(input) + const definition = buildPromptDefinitionFromId(input.promptId, env) + + if (input.enContent != null) { + prepareProjectMemoryForEnglishWrite(definition) + writeTextFile(definition.paths.en, input.enContent) + } + + if (input.distContent != null) writeTextFile(definition.paths.dist, input.distContent) + + const prompt = hydratePrompt(definition, true) + if (prompt == null) throw new Error(`Failed to load prompt after write: ${input.promptId}`) + return prompt +} + +export async function resolvePromptDefinition( + promptId: string, + options: PromptServiceOptions = {} +): Promise { + const env = resolvePromptEnvironment(options) + return buildPromptDefinitionFromId(promptId, env).paths +} diff --git a/sdk/src/public-config-paths.ts b/sdk/src/public-config-paths.ts new file mode 100644 index 00000000..475c3526 --- /dev/null +++ b/sdk/src/public-config-paths.ts @@ -0,0 +1,208 @@ +import type {IDEKind} from './plugins/plugin-core/enums' +import type {ProjectIDEConfigFile} from './plugins/plugin-core/InputTypes' +import * as fs from 'node:fs' +import * as path from 'node:path' +import process from 'node:process' +import {resolvePublicPath} from '@truenine/script-runtime' +import {AINDEX_FILE_NAMES} from './plugins/plugin-core/AindexTypes' +import {FilePathKind} from './plugins/plugin-core/enums' + +export const PUBLIC_CONFIG_DEFINITION_DIR = 'public' +export const PUBLIC_PROXY_FILE_NAME = 'proxy.ts' + +export const PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH = '.gitignore' +export const PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH = '.git/info/exclude' + +export const AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS = [ + AINDEX_FILE_NAMES.QODER_IGNORE, + AINDEX_FILE_NAMES.CURSOR_IGNORE, + AINDEX_FILE_NAMES.WARP_INDEX_IGNORE, + AINDEX_FILE_NAMES.AI_IGNORE, + AINDEX_FILE_NAMES.CODEIUM_IGNORE, + '.kiroignore', + '.traeignore' +] as const + +export const KNOWN_PUBLIC_CONFIG_TARGET_RELATIVE_PATHS = [ + PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, + PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, + AINDEX_FILE_NAMES.EDITOR_CONFIG, + AINDEX_FILE_NAMES.VSCODE_SETTINGS, + AINDEX_FILE_NAMES.VSCODE_EXTENSIONS, + AINDEX_FILE_NAMES.ZED_SETTINGS, + AINDEX_FILE_NAMES.IDEA_PROJECT_XML, + AINDEX_FILE_NAMES.IDEA_CODE_STYLE_CONFIG_XML, + AINDEX_FILE_NAMES.IDEA_GITIGNORE, + ...AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS +] as const + +export interface PublicDefinitionResolveOptions { + readonly command?: ProxyCommand | undefined + readonly workspaceDir?: string | undefined +} + +type ProxyCommand = 'execute' | 'dry-run' | 'clean' | 'plugins' + +interface ProxyContext { + readonly cwd: string + readonly workspaceDir: string + readonly aindexDir: string + readonly command: ProxyCommand + readonly platform: NodeJS.Platform +} + +const publicDefinitionPathCache = new Map() + +function normalizeTargetRelativePath(targetRelativePath: string): string { + const normalizedPath = targetRelativePath + .split(/[\\/]+/) + .filter(segment => segment.length > 0) + .join('/') + + if (normalizedPath.length === 0) + { throw new Error('public target relative path cannot be empty') } + return normalizedPath +} + +function getPublicRootDir(aindexDir: string): string { + return path.join(aindexDir, PUBLIC_CONFIG_DEFINITION_DIR) +} + +function getPublicProxyPath(aindexDir: string): string { + return path.join(getPublicRootDir(aindexDir), PUBLIC_PROXY_FILE_NAME) +} + +function getResolveCommand( + options?: PublicDefinitionResolveOptions +): ProxyCommand { + return options?.command ?? 'execute' +} + +function getResolveWorkspaceDir( + aindexDir: string, + options?: PublicDefinitionResolveOptions +): string { + return path.resolve(options?.workspaceDir ?? path.dirname(aindexDir)) +} + +function buildProxyContext( + aindexDir: string, + workspaceDir: string, + command: ProxyCommand +): ProxyContext { + const resolvedAindexDir = path.resolve(aindexDir) + + return { + cwd: workspaceDir, + workspaceDir, + aindexDir: resolvedAindexDir, + command, + platform: process.platform + } +} + +function resolvePublicPathForDefinition( + filePath: string, + ctx: ProxyContext, + logicalPath: string +): string { + // `tsc` resolves this workspace package correctly, but ESLint's type-aware rules + // sometimes treat it as an error-typed export during monorepo lint execution. + return resolvePublicPath(filePath, ctx, logicalPath) +} + +function resolvePublicDefinitionRelativePath( + aindexDir: string, + targetRelativePath: string, + options?: PublicDefinitionResolveOptions +): string { + const normalizedTargetPath = normalizeTargetRelativePath(targetRelativePath) + if (normalizedTargetPath === PUBLIC_PROXY_FILE_NAME) + { return PUBLIC_PROXY_FILE_NAME } + + const proxyFilePath = getPublicProxyPath(aindexDir) + if (!(fs.existsSync(proxyFilePath) && fs.statSync(proxyFilePath).isFile())) + { return normalizedTargetPath } + + const command = getResolveCommand(options) + const workspaceDir = getResolveWorkspaceDir(aindexDir, options) + const cacheKey = [ + proxyFilePath, + workspaceDir, + command, + normalizedTargetPath + ].join('::') + const cachedPath = publicDefinitionPathCache.get(cacheKey) + if (cachedPath != null) return cachedPath + + const resolvedRelativePath = resolvePublicPathForDefinition( + proxyFilePath, + buildProxyContext(aindexDir, workspaceDir, command), + normalizedTargetPath + ) + + publicDefinitionPathCache.set(cacheKey, resolvedRelativePath) + return resolvedRelativePath +} + +export function resolvePublicDefinitionPath( + aindexDir: string, + targetRelativePath: string, + options?: PublicDefinitionResolveOptions +): string { + const resolvedRelativePath = resolvePublicDefinitionRelativePath( + aindexDir, + targetRelativePath, + options + ) + return path.join( + getPublicRootDir(aindexDir), + ...resolvedRelativePath.split(/[\\/]+/) + ) +} + +export function collectKnownPublicConfigDefinitionPaths( + aindexDir: string, + options?: PublicDefinitionResolveOptions +): string[] { + const resolvedPaths = new Set([ + resolvePublicDefinitionPath(aindexDir, PUBLIC_PROXY_FILE_NAME) + ]) + + for (const targetRelativePath of KNOWN_PUBLIC_CONFIG_TARGET_RELATIVE_PATHS) { + resolvedPaths.add( + resolvePublicDefinitionPath(aindexDir, targetRelativePath, options) + ) + } + + return [...resolvedPaths] +} + +export function readPublicIdeConfigDefinitionFile( + type: T, + targetRelativePath: string, + aindexDir: string, + fs: typeof import('node:fs'), + options?: PublicDefinitionResolveOptions +): ProjectIDEConfigFile | undefined { + const absolutePath = resolvePublicDefinitionPath( + aindexDir, + targetRelativePath, + options + ) + if (!(fs.existsSync(absolutePath) && fs.statSync(absolutePath).isFile())) + { return void 0 } + + const content = fs.readFileSync(absolutePath, 'utf8') + return { + type, + content, + length: content.length, + filePathKind: FilePathKind.Absolute, + dir: { + pathKind: FilePathKind.Absolute, + path: absolutePath, + getDirectoryName: () => path.basename(absolutePath) + } + } +} diff --git a/sdk/src/runtime-environment.test.ts b/sdk/src/runtime-environment.test.ts new file mode 100644 index 00000000..0bdeb63f --- /dev/null +++ b/sdk/src/runtime-environment.test.ts @@ -0,0 +1,149 @@ +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {afterEach, describe, expect, it} from 'vitest' +import { + getRequiredGlobalConfigPath, + resolveRuntimeEnvironment, + resolveUserPath +} from './runtime-environment' + +describe('runtime environment', () => { + let tempDir: string | undefined + + afterEach(() => { + if (tempDir != null) fs.rmSync(tempDir, {recursive: true, force: true}) + tempDir = void 0 + }) + + it('uses the native Windows home config path when running on Windows', () => { + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-win-runtime-')) + const windowsHomeDir = path.join(tempDir, 'WindowsHome') + const configPath = path.join(windowsHomeDir, '.aindex', '.tnmsc.json') + + fs.mkdirSync(path.dirname(configPath), {recursive: true}) + fs.writeFileSync(configPath, '{}\n', 'utf8') + + const runtimeEnvironment = resolveRuntimeEnvironment({ + fs, + platform: 'win32', + env: { + USERPROFILE: windowsHomeDir + }, + homedir: windowsHomeDir + }) + + expect(runtimeEnvironment.isWsl).toBe(false) + expect(runtimeEnvironment.selectedGlobalConfigPath).toBeUndefined() + expect(runtimeEnvironment.effectiveHomeDir).toBe(windowsHomeDir) + expect(getRequiredGlobalConfigPath({ + fs, + platform: 'win32', + env: { + USERPROFILE: windowsHomeDir + }, + homedir: windowsHomeDir + })).toBe(configPath) + expect(resolveUserPath('~/.codex/config.toml', { + fs, + platform: 'win32', + env: { + USERPROFILE: windowsHomeDir + }, + homedir: windowsHomeDir + })).toBe(path.win32.join(windowsHomeDir, '.codex', 'config.toml')) + }) + + it('selects the host config path that matches the current Windows profile in WSL', () => { + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-wsl-runtime-')) + const usersRoot = path.join(tempDir, 'Users') + const alphaConfigPath = path.join(usersRoot, 'alpha', '.aindex', '.tnmsc.json') + const bravoConfigPath = path.join(usersRoot, 'bravo', '.aindex', '.tnmsc.json') + + fs.mkdirSync(path.dirname(alphaConfigPath), {recursive: true}) + fs.mkdirSync(path.dirname(bravoConfigPath), {recursive: true}) + fs.writeFileSync(alphaConfigPath, '{}\n', 'utf8') + fs.writeFileSync(bravoConfigPath, '{}\n', 'utf8') + + const runtimeEnvironment = resolveRuntimeEnvironment({ + fs, + platform: 'linux', + env: { + WSL_DISTRO_NAME: 'Ubuntu', + USERPROFILE: path.join(usersRoot, 'bravo') + }, + homedir: '/home/linux-user', + windowsUsersRoot: usersRoot + }) + + expect(runtimeEnvironment.isWsl).toBe(true) + expect(runtimeEnvironment.selectedGlobalConfigPath).toBe(bravoConfigPath) + expect(runtimeEnvironment.effectiveHomeDir).toBe(path.join(usersRoot, 'bravo').replaceAll('\\', '/')) + expect(getRequiredGlobalConfigPath({ + fs, + platform: 'linux', + env: { + WSL_DISTRO_NAME: 'Ubuntu', + USERPROFILE: path.join(usersRoot, 'bravo') + }, + homedir: '/home/linux-user', + windowsUsersRoot: usersRoot + })).toBe(bravoConfigPath) + }) + + it('fails when the discovered config belongs to another Windows profile', () => { + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-wsl-runtime-mismatch-')) + const usersRoot = path.join(tempDir, 'Users') + const alphaConfigPath = path.join(usersRoot, 'alpha', '.aindex', '.tnmsc.json') + + fs.mkdirSync(path.dirname(alphaConfigPath), {recursive: true}) + fs.writeFileSync(alphaConfigPath, '{}\n', 'utf8') + + expect(() => getRequiredGlobalConfigPath({ + fs, + platform: 'linux', + env: { + WSL_DISTRO_NAME: 'Ubuntu', + USERPROFILE: path.join(usersRoot, 'bravo') + }, + homedir: '/home/linux-user', + windowsUsersRoot: usersRoot + })).toThrow('current Windows user') + }) + + it('fails when WSL is active but no host config exists', () => { + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-wsl-runtime-missing-')) + + expect(() => getRequiredGlobalConfigPath({ + fs, + platform: 'linux', + env: {WSL_DISTRO_NAME: 'Ubuntu'}, + homedir: '/home/linux-user', + windowsUsersRoot: path.join(tempDir, 'Users') + })).toThrow('WSL host config file not found') + }) + + it('maps host-home, windows drive, and environment-variable paths for WSL workloads', () => { + const runtimeEnvironment = { + platform: 'linux', + isWsl: true, + nativeHomeDir: '/home/linux-user', + effectiveHomeDir: '/mnt/c/Users/alpha', + globalConfigCandidates: ['/mnt/c/Users/alpha/.aindex/.tnmsc.json'], + selectedGlobalConfigPath: '/mnt/c/Users/alpha/.aindex/.tnmsc.json', + wslHostHomeDir: '/mnt/c/Users/alpha', + windowsUsersRoot: '/mnt/c/Users', + expandedEnv: { + HOME: '/mnt/c/Users/alpha', + USERPROFILE: '/mnt/c/Users/alpha', + HOMEDRIVE: 'C:', + HOMEPATH: '\\Users\\alpha' + } + } as const + + expect(resolveUserPath('~/workspace\\foo', runtimeEnvironment)).toBe('/mnt/c/Users/alpha/workspace/foo') + expect(resolveUserPath('C:\\Work\\Repo', runtimeEnvironment)).toBe('/mnt/c/Work/Repo') + expect(resolveUserPath('%USERPROFILE%\\workspace\\bar', runtimeEnvironment)).toBe('/mnt/c/Users/alpha/workspace/bar') + expect(resolveUserPath('$HOME/workspace/baz', runtimeEnvironment)).toBe('/mnt/c/Users/alpha/workspace/baz') + }) +}) diff --git a/sdk/src/runtime-environment.ts b/sdk/src/runtime-environment.ts new file mode 100644 index 00000000..7c2db229 --- /dev/null +++ b/sdk/src/runtime-environment.ts @@ -0,0 +1,361 @@ +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import process from 'node:process' + +export const DEFAULT_WSL_WINDOWS_USERS_ROOT = '/mnt/c/Users' +export const DEFAULT_GLOBAL_CONFIG_DIR = '.aindex' +export const DEFAULT_GLOBAL_CONFIG_FILE_NAME = '.tnmsc.json' + +const WINDOWS_DRIVE_PATH_PATTERN = /^[A-Za-z]:[\\/]/u +const PERCENT_ENV_PATTERN = /%([^%]+)%/gu +const BRACED_ENV_PATTERN = /\$\{([A-Za-z_]\w*)\}/gu +const SHELL_ENV_PATTERN = /\$([A-Za-z_]\w*)/gu + +type RuntimeFs = Pick + +export interface RuntimeEnvironmentDependencies { + readonly fs?: RuntimeFs + readonly env?: NodeJS.ProcessEnv + readonly platform?: NodeJS.Platform + readonly homedir?: string + readonly release?: string + readonly windowsUsersRoot?: string +} + +export interface RuntimeEnvironmentContext { + readonly platform: NodeJS.Platform + readonly isWsl: boolean + readonly nativeHomeDir: string + readonly effectiveHomeDir: string + readonly globalConfigCandidates: readonly string[] + readonly selectedGlobalConfigPath?: string + readonly wslHostHomeDir?: string + readonly windowsUsersRoot: string + readonly expandedEnv: Readonly> +} + +function isRuntimeEnvironmentContext( + value: RuntimeEnvironmentDependencies | RuntimeEnvironmentContext | undefined +): value is RuntimeEnvironmentContext { + return value != null + && 'effectiveHomeDir' in value + && 'expandedEnv' in value +} + +function getFs(dependencies?: RuntimeEnvironmentDependencies): RuntimeFs { + return dependencies?.fs ?? fs +} + +function getPlatform(dependencies?: RuntimeEnvironmentDependencies): NodeJS.Platform { + return dependencies?.platform ?? process.platform +} + +function getRelease(dependencies?: RuntimeEnvironmentDependencies): string { + return dependencies?.release ?? os.release() +} + +function getNativeHomeDir(dependencies?: RuntimeEnvironmentDependencies): string { + return dependencies?.homedir ?? os.homedir() +} + +function getEnv(dependencies?: RuntimeEnvironmentDependencies): NodeJS.ProcessEnv { + return dependencies?.env ?? process.env +} + +function getWindowsUsersRoot(dependencies?: RuntimeEnvironmentDependencies): string { + return dependencies?.windowsUsersRoot ?? DEFAULT_WSL_WINDOWS_USERS_ROOT +} + +function normalizePosixLikePath(rawPath: string): string { + return path.posix.normalize(rawPath.replaceAll('\\', '/')) +} + +function isSameOrChildPath(candidatePath: string, parentPath: string): boolean { + const normalizedCandidate = normalizePosixLikePath(candidatePath) + const normalizedParent = normalizePosixLikePath(parentPath) + + if (normalizedCandidate === normalizedParent) return true + return normalizedCandidate.startsWith(`${normalizedParent}/`) +} + +function resolveWslHostHomeCandidate( + rawPath: string | undefined, + usersRoot: string +): string | undefined { + if (typeof rawPath !== 'string') return void 0 + + const trimmedPath = rawPath.trim() + if (trimmedPath.length === 0) return void 0 + + const candidatePaths = [ + convertWindowsPathToWsl(trimmedPath), + normalizePosixLikePath(trimmedPath) + ] + + for (const candidatePath of candidatePaths) { + if (candidatePath == null) continue + if (isSameOrChildPath(candidatePath, usersRoot)) return normalizePosixLikePath(candidatePath) + } + + return void 0 +} + +function getPreferredWslHostHomeDirs( + dependencies?: RuntimeEnvironmentDependencies +): string[] { + const env = getEnv(dependencies) + const usersRoot = normalizePosixLikePath(getWindowsUsersRoot(dependencies)) + const homeDrive = env['HOMEDRIVE'] + const homePath = env['HOMEPATH'] + const preferredHomeDirs = [ + resolveWslHostHomeCandidate(env['USERPROFILE'], usersRoot), + typeof homeDrive === 'string' && homeDrive.length > 0 && typeof homePath === 'string' && homePath.length > 0 + ? resolveWslHostHomeCandidate(`${homeDrive}${homePath}`, usersRoot) + : void 0, + resolveWslHostHomeCandidate(env['HOME'], usersRoot) + ] + + return [...new Set(preferredHomeDirs.filter((candidate): candidate is string => candidate != null))] +} + +function getWslHostHomeDirForConfigPath(configPath: string): string { + const normalizedConfigPath = normalizePosixLikePath(configPath) + return path.posix.dirname(path.posix.dirname(normalizedConfigPath)) +} + +function selectWslHostGlobalConfigPath( + globalConfigCandidates: readonly string[], + dependencies?: RuntimeEnvironmentDependencies +): string | undefined { + const preferredHomeDirs = getPreferredWslHostHomeDirs(dependencies) + + if (preferredHomeDirs.length <= 0) return globalConfigCandidates.length === 1 ? globalConfigCandidates[0] : void 0 + + for (const preferredHomeDir of preferredHomeDirs) { + const matchedCandidate = globalConfigCandidates.find(candidatePath => + getWslHostHomeDirForConfigPath(candidatePath) === preferredHomeDir) + if (matchedCandidate != null) return matchedCandidate + } + return void 0 +} + +function isDirectory(fsImpl: RuntimeFs, targetPath: string): boolean { + try { + return fsImpl.statSync(targetPath).isDirectory() + } + catch { + return false + } +} + +function isFile(fsImpl: RuntimeFs, targetPath: string): boolean { + try { + return fsImpl.statSync(targetPath).isFile() + } + catch { + return false + } +} + +function getPathModule(platform: NodeJS.Platform): typeof path.posix | typeof path.win32 { + return platform === 'win32' ? path.win32 : path.posix +} + +function buildExpandedEnv( + rawEnv: NodeJS.ProcessEnv, + nativeHomeDir: string, + effectiveHomeDir: string +): Readonly> { + const expandedEnv: Record = {} + + for (const [key, value] of Object.entries(rawEnv)) { + if (typeof value === 'string') expandedEnv[key] = value + } + + if (effectiveHomeDir === nativeHomeDir) return expandedEnv + + expandedEnv['HOME'] = effectiveHomeDir + expandedEnv['USERPROFILE'] = effectiveHomeDir + const hostHomeMatch = /^\/mnt\/([a-zA-Z])\/(.+)$/u.exec(effectiveHomeDir) + if (hostHomeMatch == null) return expandedEnv + + const driveLetter = hostHomeMatch[1] + const relativePath = hostHomeMatch[2] + if (driveLetter == null || relativePath == null) return expandedEnv + expandedEnv['HOMEDRIVE'] = `${driveLetter.toUpperCase()}:` + expandedEnv['HOMEPATH'] = `\\${relativePath.replaceAll('/', '\\')}` + return expandedEnv +} + +function expandEnvironmentVariables( + rawPath: string, + environment: Readonly> +): string { + const replaceValue = (match: string, key: string): string => environment[key] ?? match + + return rawPath + .replaceAll(PERCENT_ENV_PATTERN, replaceValue) + .replaceAll(BRACED_ENV_PATTERN, replaceValue) + .replaceAll(SHELL_ENV_PATTERN, replaceValue) +} + +function expandHomeDirectory( + rawPath: string, + homeDir: string, + platform: NodeJS.Platform +): string { + if (rawPath === '~') return homeDir + if (!(rawPath.startsWith('~/') || rawPath.startsWith('~\\'))) return rawPath + + const pathModule = getPathModule(platform) + const normalizedSuffix = platform === 'win32' + ? rawPath.slice(2).replaceAll('/', '\\') + : rawPath.slice(2).replaceAll('\\', '/') + + return pathModule.resolve(homeDir, normalizedSuffix) +} + +function convertWindowsPathToWsl(rawPath: string): string | undefined { + if (!WINDOWS_DRIVE_PATH_PATTERN.test(rawPath)) return void 0 + + const driveLetter = rawPath.slice(0, 1).toLowerCase() + const relativePath = rawPath + .slice(2) + .replaceAll('\\', '/') + .replace(/^\/+/u, '') + + const basePath = `/mnt/${driveLetter}` + if (relativePath.length === 0) return basePath + return path.posix.join(basePath, relativePath) +} + +function normalizeResolvedPath(rawPath: string, platform: NodeJS.Platform): string { + if (platform === 'win32') return path.win32.normalize(rawPath.replaceAll('/', '\\')) + return path.posix.normalize(rawPath) +} + +export function isWslRuntime( + dependencies?: RuntimeEnvironmentDependencies +): boolean { + if (getPlatform(dependencies) !== 'linux') return false + + const env = getEnv(dependencies) + if (typeof env['WSL_DISTRO_NAME'] === 'string' && env['WSL_DISTRO_NAME'].length > 0) return true + if (typeof env['WSL_INTEROP'] === 'string' && env['WSL_INTEROP'].length > 0) return true + + return getRelease(dependencies).toLowerCase().includes('microsoft') +} + +export function findWslHostGlobalConfigPaths( + dependencies?: RuntimeEnvironmentDependencies +): string[] { + const fsImpl = getFs(dependencies) + const usersRoot = getWindowsUsersRoot(dependencies) + + if (!isDirectory(fsImpl, usersRoot)) return [] + + try { + const dirEntries = fsImpl.readdirSync(usersRoot, {withFileTypes: true}) + const candidates = dirEntries + .filter(dirEntry => dirEntry.isDirectory()) + .map(dirEntry => path.join(usersRoot, dirEntry.name, DEFAULT_GLOBAL_CONFIG_DIR, DEFAULT_GLOBAL_CONFIG_FILE_NAME)) + .filter(candidatePath => fsImpl.existsSync(candidatePath) && isFile(fsImpl, candidatePath)) + + candidates.sort((a, b) => a.localeCompare(b)) + return candidates + } + catch { + return [] + } +} + +export function resolveRuntimeEnvironment( + dependencies?: RuntimeEnvironmentDependencies +): RuntimeEnvironmentContext { + const platform = getPlatform(dependencies) + const nativeHomeDir = getNativeHomeDir(dependencies) + const wslRuntime = isWslRuntime(dependencies) + const globalConfigCandidates = wslRuntime ? findWslHostGlobalConfigPaths(dependencies) : [] + const selectedGlobalConfigPath = wslRuntime + ? selectWslHostGlobalConfigPath(globalConfigCandidates, dependencies) + : void 0 + const effectiveHomeDir = selectedGlobalConfigPath != null + ? getWslHostHomeDirForConfigPath(selectedGlobalConfigPath) + : nativeHomeDir + + return { + platform, + isWsl: wslRuntime, + nativeHomeDir, + effectiveHomeDir, + globalConfigCandidates, + ...selectedGlobalConfigPath != null && {selectedGlobalConfigPath}, + ...selectedGlobalConfigPath != null && {wslHostHomeDir: effectiveHomeDir}, + windowsUsersRoot: getWindowsUsersRoot(dependencies), + expandedEnv: buildExpandedEnv(getEnv(dependencies), nativeHomeDir, effectiveHomeDir) + } +} + +export function getEffectiveHomeDir( + dependencies?: RuntimeEnvironmentDependencies +): string { + return resolveRuntimeEnvironment(dependencies).effectiveHomeDir +} + +export function getGlobalConfigPath( + dependencies?: RuntimeEnvironmentDependencies +): string { + const runtimeEnvironment = resolveRuntimeEnvironment(dependencies) + if (runtimeEnvironment.selectedGlobalConfigPath != null) return runtimeEnvironment.selectedGlobalConfigPath + + return path.join( + runtimeEnvironment.effectiveHomeDir, + DEFAULT_GLOBAL_CONFIG_DIR, + DEFAULT_GLOBAL_CONFIG_FILE_NAME + ) +} + +export function getRequiredGlobalConfigPath( + dependencies?: RuntimeEnvironmentDependencies +): string { + const runtimeEnvironment = resolveRuntimeEnvironment(dependencies) + + if (!runtimeEnvironment.isWsl || runtimeEnvironment.selectedGlobalConfigPath != null) { + return getGlobalConfigPath(dependencies) + } + + const configLookupPattern = `"${runtimeEnvironment.windowsUsersRoot}/*/${DEFAULT_GLOBAL_CONFIG_DIR}/${DEFAULT_GLOBAL_CONFIG_FILE_NAME}"` + if (runtimeEnvironment.globalConfigCandidates.length === 0) { + throw new Error(`WSL host config file not found under ${configLookupPattern}.`) + } + if (getPreferredWslHostHomeDirs(dependencies).length > 0) { + throw new Error(`WSL host config file for the current Windows user was not found under ${configLookupPattern}.`) + } + throw new Error(`WSL host config file could not be matched to the current Windows user under ${configLookupPattern}.`) +} + +export function resolveUserPath( + rawPath: string, + dependenciesOrContext?: RuntimeEnvironmentDependencies | RuntimeEnvironmentContext +): string { + const runtimeEnvironment = isRuntimeEnvironmentContext(dependenciesOrContext) + ? dependenciesOrContext + : resolveRuntimeEnvironment(dependenciesOrContext) + + let resolvedPath = expandEnvironmentVariables(rawPath, runtimeEnvironment.expandedEnv) + resolvedPath = expandHomeDirectory(resolvedPath, runtimeEnvironment.effectiveHomeDir, runtimeEnvironment.platform) + + if (!runtimeEnvironment.isWsl) return normalizeResolvedPath(resolvedPath, runtimeEnvironment.platform) + + const convertedWindowsPath = convertWindowsPathToWsl(resolvedPath) + if (convertedWindowsPath != null) resolvedPath = convertedWindowsPath + else if ( + resolvedPath.startsWith(runtimeEnvironment.effectiveHomeDir) + || resolvedPath.startsWith('/mnt/') + || resolvedPath.startsWith('/') + ) { + resolvedPath = resolvedPath.replaceAll('\\', '/') + } + return normalizeResolvedPath(resolvedPath, runtimeEnvironment.platform) +} diff --git a/sdk/src/schema.ts b/sdk/src/schema.ts new file mode 100644 index 00000000..1ff13cf7 --- /dev/null +++ b/sdk/src/schema.ts @@ -0,0 +1,14 @@ +import {zodToJsonSchema} from 'zod-to-json-schema' +import {ZUserConfigFile} from './plugins/plugin-core' + +/** + * JSON Schema for .tnmsc.json — auto-generated from ZUserConfigFile via zod-to-json-schema. + * Do not edit manually; update ZUserConfigFile in types/ConfigTypes.schema.ts instead. + */ +export const TNMSC_JSON_SCHEMA = zodToJsonSchema(ZUserConfigFile, { + name: 'UserConfigFile', + nameStrategy: 'title', + $refStrategy: 'none', + target: 'jsonSchema7', + definitionPath: '$defs' +}) diff --git a/sdk/src/script-runtime-worker.ts b/sdk/src/script-runtime-worker.ts new file mode 100644 index 00000000..ae6854a6 --- /dev/null +++ b/sdk/src/script-runtime-worker.ts @@ -0,0 +1,19 @@ +import {readFileSync} from 'node:fs' +import process from 'node:process' +import {resolvePublicPathUnchecked} from '@truenine/script-runtime' + +async function main(): Promise { + const [, , filePath, ctxJsonPath, logicalPath] = process.argv + if (filePath == null || ctxJsonPath == null || logicalPath == null) throw new Error('Usage: script-runtime-worker ') + + const ctxJson = readFileSync(ctxJsonPath, 'utf8') + const ctx = JSON.parse(ctxJson) as Parameters[1] + const result = await resolvePublicPathUnchecked(filePath, ctx, logicalPath) + process.stdout.write(`${result}\n`) +} + +main().catch((error: unknown) => { + const message = error instanceof Error ? error.message : String(error) + process.stderr.write(`${message}\n`) + process.exit(1) +}) diff --git a/sdk/src/wsl-mirror-sync.test.ts b/sdk/src/wsl-mirror-sync.test.ts new file mode 100644 index 00000000..d4af7962 --- /dev/null +++ b/sdk/src/wsl-mirror-sync.test.ts @@ -0,0 +1,588 @@ +import type {ILogger, OutputFileDeclaration, OutputPlugin, OutputWriteContext} from './plugins/plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import {describe, expect, it, vi} from 'vitest' +import {PluginKind} from './plugins/plugin-core' +import {syncWindowsConfigIntoWsl} from './wsl-mirror-sync' + +class MemoryMirrorFs { + readonly files = new Map() + + readonly directories = new Set() + + private normalizePath(targetPath: string): string { + if (targetPath.includes('\\') || /^[A-Za-z]:[\\/]/u.test(targetPath)) { + return path.win32.normalize(targetPath) + } + + return path.posix.normalize(targetPath) + } + + private getPathModule(targetPath: string): typeof path.win32 | typeof path.posix { + if (targetPath.includes('\\') || /^[A-Za-z]:[\\/]/u.test(targetPath)) { + return path.win32 + } + + return path.posix + } + + existsSync(targetPath: string): boolean { + const normalizedPath = this.normalizePath(targetPath) + return this.files.has(normalizedPath) || this.directories.has(normalizedPath) + } + + mkdirSync(targetPath: string, options?: {recursive?: boolean}): void { + const pathModule = this.getPathModule(targetPath) + const normalizedPath = pathModule.normalize(targetPath) + + if (options?.recursive === true) { + let currentPath = normalizedPath + while (currentPath.length > 0 && !this.directories.has(currentPath)) { + this.directories.add(currentPath) + const parentPath = pathModule.dirname(currentPath) + if (parentPath === currentPath) break + currentPath = parentPath + } + return + } + + this.directories.add(normalizedPath) + } + + readFileSync(targetPath: string): Buffer { + const normalizedPath = this.normalizePath(targetPath) + const content = this.files.get(normalizedPath) + if (content == null) throw new Error(`ENOENT: ${normalizedPath}`) + return Buffer.from(content) + } + + writeFileSync(targetPath: string, data: string | NodeJS.ArrayBufferView): void { + const pathModule = this.getPathModule(targetPath) + const normalizedPath = pathModule.normalize(targetPath) + this.directories.add(pathModule.dirname(normalizedPath)) + + if (typeof data === 'string') { + this.files.set(normalizedPath, Buffer.from(data, 'utf8')) + return + } + + this.files.set(normalizedPath, Buffer.from(data.buffer, data.byteOffset, data.byteLength)) + } + + seedDirectory(targetPath: string): void { + this.directories.add(this.normalizePath(targetPath)) + } + + seedFile(targetPath: string, content: string): void { + const pathModule = this.getPathModule(targetPath) + const normalizedPath = pathModule.normalize(targetPath) + this.directories.add(pathModule.dirname(normalizedPath)) + this.files.set(normalizedPath, Buffer.from(content, 'utf8')) + } +} + +interface RecordedLogger extends ILogger { + readonly infoMessages: string[] +} + +function createLogger(): RecordedLogger { + const infoMessages: string[] = [] + return { + trace: () => {}, + debug: () => {}, + info: (message: unknown) => { + infoMessages.push(String(message)) + }, + warn: () => {}, + error: () => {}, + fatal: () => {}, + infoMessages + } as RecordedLogger +} + +function createMirrorPlugin(sourcePaths: string | readonly string[] = []): OutputPlugin { + const normalizedPaths = Array.isArray(sourcePaths) ? sourcePaths : [sourcePaths] + + return { + type: PluginKind.Output, + name: 'MirrorPlugin', + log: createLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return [] + }, + async convertContent() { + return '' + }, + async declareWslMirrorFiles() { + return normalizedPaths + .filter(sourcePath => sourcePath.length > 0) + .map(sourcePath => ({sourcePath})) + } + } +} + +function createWriteContext(instances?: string | string[], dryRun: boolean = false): OutputWriteContext { + return { + logger: createLogger(), + dryRun, + runtimeTargets: { + jetbrainsCodexDirs: [] + }, + pluginOptions: { + windows: { + wsl2: { + instances + } + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: 'absolute', + path: 'C:\\workspace', + getDirectoryName: () => 'workspace' + }, + projects: [] + } + } + } as unknown as OutputWriteContext +} + +function createPredeclaredOutputs( + plugin: OutputPlugin, + declarations: readonly OutputFileDeclaration[] +): ReadonlyMap { + return new Map([[plugin, declarations]]) +} + +function createGlobalOutputDeclaration( + targetPath: string +): OutputFileDeclaration { + return { + path: targetPath, + scope: 'global', + source: {kind: 'generated'} + } +} + +function createWslSpawnSyncMock( + homesByInstance: Readonly>, + discoveredInstances: readonly string[] = Object.keys(homesByInstance) +) { + return vi.fn((_command: string, args: readonly string[]) => { + if (args[0] === '--list' && args[1] === '--quiet') { + return { + status: 0, + stdout: Buffer.from(discoveredInstances.join('\r\n'), 'utf16le'), + stderr: Buffer.alloc(0) + } + } + + if (args[0] === '-d') { + const instance = args[1] + const linuxHomeDir = instance == null ? void 0 : homesByInstance[instance] + + if (linuxHomeDir == null) { + return { + status: 1, + stdout: Buffer.alloc(0), + stderr: Buffer.from(`distribution "${instance}" not found`, 'utf8') + } + } + + return { + status: 0, + stdout: Buffer.from(linuxHomeDir, 'utf8'), + stderr: Buffer.alloc(0) + } + } + + throw new Error(`Unexpected spawnSync args: ${JSON.stringify(args)}`) + }) +} + +function wasWslListCalled( + spawnSyncMock: ReturnType +): boolean { + return spawnSyncMock.mock.calls.some(([, args]) => Array.isArray(args) && args[0] === '--list' && args[1] === '--quiet') +} + +describe('wsl mirror sync', () => { + it('copies declared host config files into each resolved WSL home', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = 'C:\\Users\\alpha' + const sourcePath = path.win32.join(hostHomeDir, '.codex', 'config.toml') + const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' + const targetPath = path.win32.join(targetHomeDir, '.codex', 'config.toml') + + memoryFs.seedFile(sourcePath, 'codex = true\n') + memoryFs.seedDirectory(targetHomeDir) + + const spawnSyncMock = createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) + + const result = await syncWindowsConfigIntoWsl( + [createMirrorPlugin('~/.codex/config.toml')], + createWriteContext('Ubuntu'), + { + fs: memoryFs, + spawnSync: spawnSyncMock as never, + platform: 'win32', + effectiveHomeDir: hostHomeDir + } + ) + + expect(result.errors).toEqual([]) + expect(result.warnings).toEqual([]) + expect(result.mirroredFiles).toBe(1) + expect(memoryFs.readFileSync(targetPath).toString('utf8')).toBe('codex = true\n') + expect(wasWslListCalled(spawnSyncMock)).toBe(false) + }) + + it('copies generated global outputs under the host home into each resolved WSL home', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = 'C:\\Users\\alpha' + const sourcePath = path.win32.join(hostHomeDir, '.codex', 'AGENTS.md') + const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' + const targetPath = path.win32.join(targetHomeDir, '.codex', 'AGENTS.md') + const plugin = createMirrorPlugin() + + memoryFs.seedFile(sourcePath, 'global prompt\n') + memoryFs.seedDirectory(targetHomeDir) + + const result = await syncWindowsConfigIntoWsl( + [plugin], + createWriteContext('Ubuntu'), + { + fs: memoryFs, + spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, + platform: 'win32', + effectiveHomeDir: hostHomeDir + }, + createPredeclaredOutputs(plugin, [createGlobalOutputDeclaration(sourcePath)]) + ) + + expect(result.errors).toEqual([]) + expect(result.warnings).toEqual([]) + expect(result.mirroredFiles).toBe(1) + expect(memoryFs.readFileSync(targetPath).toString('utf8')).toBe('global prompt\n') + }) + + it('excludes generated Windows app-data globals from WSL mirroring', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = 'C:\\Users\\alpha' + const sourcePath = path.win32.join(hostHomeDir, 'AppData', 'Local', 'JetBrains', 'IntelliJIdea2026.1', 'aia', 'codex', 'AGENTS.md') + const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' + const plugin = createMirrorPlugin() + + memoryFs.seedFile(sourcePath, 'jetbrains prompt\n') + memoryFs.seedDirectory(targetHomeDir) + + const result = await syncWindowsConfigIntoWsl( + [plugin], + createWriteContext('Ubuntu'), + { + fs: memoryFs, + spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, + platform: 'win32', + effectiveHomeDir: hostHomeDir + }, + createPredeclaredOutputs(plugin, [createGlobalOutputDeclaration(sourcePath)]) + ) + + expect(result).toEqual({ + mirroredFiles: 0, + warnings: [], + errors: [] + }) + }) + + it('unions generated globals with declared mirror files and dedupes by source path', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = 'C:\\Users\\alpha' + const configPath = path.win32.join(hostHomeDir, '.codex', 'config.toml') + const authPath = path.win32.join(hostHomeDir, '.codex', 'auth.json') + const promptPath = path.win32.join(hostHomeDir, '.codex', 'AGENTS.md') + const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' + const plugin = createMirrorPlugin(['~/.codex/config.toml', '~/.codex/auth.json']) + + memoryFs.seedFile(configPath, 'codex = true\n') + memoryFs.seedFile(authPath, '{"token":"abc"}\n') + memoryFs.seedFile(promptPath, 'global prompt\n') + memoryFs.seedDirectory(targetHomeDir) + + const result = await syncWindowsConfigIntoWsl( + [plugin], + createWriteContext('Ubuntu'), + { + fs: memoryFs, + spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, + platform: 'win32', + effectiveHomeDir: hostHomeDir + }, + createPredeclaredOutputs(plugin, [ + createGlobalOutputDeclaration(configPath), + createGlobalOutputDeclaration(promptPath) + ]) + ) + + expect(result.errors).toEqual([]) + expect(result.warnings).toEqual([]) + expect(result.mirroredFiles).toBe(3) + expect(memoryFs.readFileSync(path.win32.join(targetHomeDir, '.codex', 'config.toml')).toString('utf8')).toBe('codex = true\n') + expect(memoryFs.readFileSync(path.win32.join(targetHomeDir, '.codex', 'auth.json')).toString('utf8')).toBe('{"token":"abc"}\n') + expect(memoryFs.readFileSync(path.win32.join(targetHomeDir, '.codex', 'AGENTS.md')).toString('utf8')).toBe('global prompt\n') + }) + + it('auto-discovers WSL instances when none are configured', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = 'C:\\Users\\alpha' + const sourcePath = path.win32.join(hostHomeDir, '.codex', 'config.toml') + const spawnSyncMock = createWslSpawnSyncMock({ + Ubuntu: '/home/alpha', + Debian: '/home/beta' + }, ['Ubuntu', 'Debian']) + + memoryFs.seedFile(sourcePath, 'codex = true\n') + memoryFs.seedDirectory('\\\\wsl$\\Ubuntu\\home\\alpha') + memoryFs.seedDirectory('\\\\wsl$\\Debian\\home\\beta') + + const result = await syncWindowsConfigIntoWsl( + [createMirrorPlugin('~/.codex/config.toml')], + createWriteContext(), + { + fs: memoryFs, + spawnSync: spawnSyncMock as never, + platform: 'win32', + effectiveHomeDir: hostHomeDir + } + ) + + expect(result.errors).toEqual([]) + expect(result.warnings).toEqual([]) + expect(result.mirroredFiles).toBe(2) + expect(wasWslListCalled(spawnSyncMock)).toBe(true) + }) + + it('prefers configured WSL instances over auto-discovery', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = 'C:\\Users\\alpha' + const sourcePath = path.win32.join(hostHomeDir, '.codex', 'config.toml') + const spawnSyncMock = createWslSpawnSyncMock({ + Ubuntu: '/home/alpha', + Debian: '/home/beta' + }, ['Ubuntu', 'Debian']) + + memoryFs.seedFile(sourcePath, 'codex = true\n') + memoryFs.seedDirectory('\\\\wsl$\\Ubuntu\\home\\alpha') + + const result = await syncWindowsConfigIntoWsl( + [createMirrorPlugin('~/.codex/config.toml')], + createWriteContext('Ubuntu'), + { + fs: memoryFs, + spawnSync: spawnSyncMock as never, + platform: 'win32', + effectiveHomeDir: hostHomeDir + } + ) + + expect(result.errors).toEqual([]) + expect(result.warnings).toEqual([]) + expect(result.mirroredFiles).toBe(1) + expect(wasWslListCalled(spawnSyncMock)).toBe(false) + }) + + it('warns and skips when a declared host config file does not exist', async () => { + const memoryFs = new MemoryMirrorFs() + memoryFs.seedDirectory('\\\\wsl$\\Ubuntu\\home\\alpha') + + const result = await syncWindowsConfigIntoWsl( + [createMirrorPlugin('~/.claude/settings.json')], + createWriteContext('Ubuntu'), + { + fs: memoryFs, + spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, + platform: 'win32', + effectiveHomeDir: 'C:\\Users\\alpha' + } + ) + + expect(result.errors).toEqual([]) + expect(result.warnings).toEqual([ + 'Skipping missing WSL mirror source file: C:\\Users\\alpha\\.claude\\settings.json' + ]) + expect(result.mirroredFiles).toBe(0) + }) + + it('validates WSL instance probing before writing any mirrored files', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = 'C:\\Users\\alpha' + memoryFs.seedFile(path.win32.join(hostHomeDir, '.codex', 'auth.json'), '{"ok":true}\n') + + const result = await syncWindowsConfigIntoWsl( + [createMirrorPlugin('~/.codex/auth.json')], + createWriteContext('BrokenUbuntu'), + { + fs: memoryFs, + spawnSync: vi.fn(() => ({ + status: 1, + stdout: Buffer.alloc(0), + stderr: Buffer.from('distribution not found', 'utf8') + })) as never, + platform: 'win32', + effectiveHomeDir: hostHomeDir + } + ) + + expect(result.mirroredFiles).toBe(0) + expect(result.warnings).toEqual([]) + expect(result.errors).toEqual([ + 'Failed to probe WSL instance "BrokenUbuntu". distribution not found' + ]) + }) + + it('counts dry-run mirror operations without writing explicit mirror files', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = 'C:\\Users\\alpha' + const sourcePath = path.win32.join(hostHomeDir, '.claude', 'config.json') + const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' + const targetPath = path.win32.join(targetHomeDir, '.claude', 'config.json') + + memoryFs.seedFile(sourcePath, '{"theme":"dark"}\n') + memoryFs.seedDirectory(targetHomeDir) + + const result = await syncWindowsConfigIntoWsl( + [createMirrorPlugin('~/.claude/config.json')], + createWriteContext('Ubuntu', true), + { + fs: memoryFs, + spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, + platform: 'win32', + effectiveHomeDir: hostHomeDir + } + ) + + expect(result.errors).toEqual([]) + expect(result.mirroredFiles).toBe(1) + expect(memoryFs.existsSync(targetPath)).toBe(false) + }) + + it('counts generated outputs during dry-run even before the host file exists', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = 'C:\\Users\\alpha' + const sourcePath = path.win32.join(hostHomeDir, '.codex', 'AGENTS.md') + const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' + const targetPath = path.win32.join(targetHomeDir, '.codex', 'AGENTS.md') + const plugin = createMirrorPlugin() + + memoryFs.seedDirectory(targetHomeDir) + + const result = await syncWindowsConfigIntoWsl( + [plugin], + createWriteContext('Ubuntu', true), + { + fs: memoryFs, + spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, + platform: 'win32', + effectiveHomeDir: hostHomeDir + }, + createPredeclaredOutputs(plugin, [createGlobalOutputDeclaration(sourcePath)]) + ) + + expect(result.errors).toEqual([]) + expect(result.warnings).toEqual([]) + expect(result.mirroredFiles).toBe(1) + expect(memoryFs.existsSync(targetPath)).toBe(false) + }) + + it('logs info and skips mirror sync when WSL is unavailable on the host', async () => { + const memoryFs = new MemoryMirrorFs() + const logger = createLogger() + + const result = await syncWindowsConfigIntoWsl( + [createMirrorPlugin('~/.codex/config.toml')], + { + ...createWriteContext('Ubuntu'), + logger + }, + { + fs: memoryFs, + spawnSync: vi.fn(() => ({ + status: null, + stdout: Buffer.alloc(0), + stderr: Buffer.alloc(0), + error: Object.assign(new Error('spawnSync wsl.exe ENOENT'), {code: 'ENOENT'}) + })) as never, + platform: 'win32', + effectiveHomeDir: 'C:\\Users\\alpha' + } + ) + + expect(result).toEqual({ + mirroredFiles: 0, + warnings: [], + errors: [] + }) + expect(logger.infoMessages).toContain('wsl is unavailable, skipping WSL mirror sync') + }) + + it('mirrors declared host config files back into the current WSL home when running inside WSL', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = '/mnt/c/Users/alpha' + const nativeHomeDir = '/home/alpha' + const sourcePath = path.posix.join(hostHomeDir, '.codex', 'config.toml') + const targetPath = path.posix.join(nativeHomeDir, '.codex', 'config.toml') + + memoryFs.seedFile(sourcePath, 'codex = true\n') + memoryFs.seedDirectory(nativeHomeDir) + + const result = await syncWindowsConfigIntoWsl( + [createMirrorPlugin('~/.codex/config.toml')], + createWriteContext(), + { + fs: memoryFs, + platform: 'linux', + isWsl: true, + effectiveHomeDir: hostHomeDir, + nativeHomeDir + } + ) + + expect(result.errors).toEqual([]) + expect(result.warnings).toEqual([]) + expect(result.mirroredFiles).toBe(1) + expect(memoryFs.readFileSync(targetPath).toString('utf8')).toBe('codex = true\n') + }) + + it('mirrors generated global outputs back into the current WSL home when running inside WSL', async () => { + const memoryFs = new MemoryMirrorFs() + const hostHomeDir = '/mnt/c/Users/alpha' + const nativeHomeDir = '/home/alpha' + const sourcePath = path.posix.join(hostHomeDir, '.codex', 'AGENTS.md') + const targetPath = path.posix.join(nativeHomeDir, '.codex', 'AGENTS.md') + const plugin = createMirrorPlugin() + + memoryFs.seedFile(sourcePath, 'global prompt\n') + memoryFs.seedDirectory(nativeHomeDir) + + const result = await syncWindowsConfigIntoWsl( + [plugin], + createWriteContext(), + { + fs: memoryFs, + platform: 'linux', + isWsl: true, + effectiveHomeDir: hostHomeDir, + nativeHomeDir + }, + createPredeclaredOutputs(plugin, [createGlobalOutputDeclaration(sourcePath)]) + ) + + expect(result.errors).toEqual([]) + expect(result.warnings).toEqual([]) + expect(result.mirroredFiles).toBe(1) + expect(memoryFs.readFileSync(targetPath).toString('utf8')).toBe('global prompt\n') + }) +}) diff --git a/sdk/src/wsl-mirror-sync.ts b/sdk/src/wsl-mirror-sync.ts new file mode 100644 index 00000000..04d4cffc --- /dev/null +++ b/sdk/src/wsl-mirror-sync.ts @@ -0,0 +1,656 @@ +import type { + ILogger, + OutputFileDeclaration, + OutputPlugin, + OutputWriteContext, + PluginOptions, + WslMirrorFileDeclaration +} from './plugins/plugin-core' +import type {RuntimeEnvironmentContext} from './runtime-environment' +import {Buffer} from 'node:buffer' +import {spawnSync} from 'node:child_process' +import * as fs from 'node:fs' +import * as path from 'node:path' +import process from 'node:process' +import {getEffectiveHomeDir, resolveRuntimeEnvironment, resolveUserPath} from './runtime-environment' + +type MirrorFs = Pick +type SpawnSyncFn = typeof spawnSync +type SpawnSyncResult = ReturnType + +export interface WslMirrorRuntimeDependencies { + readonly fs?: MirrorFs + readonly spawnSync?: SpawnSyncFn + readonly platform?: NodeJS.Platform + readonly effectiveHomeDir?: string + readonly nativeHomeDir?: string + readonly isWsl?: boolean +} + +export interface ResolvedWslInstanceTarget { + readonly instance: string + readonly linuxHomeDir: string + readonly windowsHomeDir: string +} + +export interface WslMirrorSyncResult { + readonly mirroredFiles: number + readonly warnings: readonly string[] + readonly errors: readonly string[] +} + +class WslUnavailableError extends Error {} + +interface ResolvedWslMirrorSource { + readonly kind: 'declared' | 'generated' + readonly sourcePath: string + readonly relativePathSegments: readonly string[] +} + +function getFs(dependencies?: WslMirrorRuntimeDependencies): MirrorFs { + return dependencies?.fs ?? fs +} + +function getSpawnSync(dependencies?: WslMirrorRuntimeDependencies): SpawnSyncFn { + return dependencies?.spawnSync ?? spawnSync +} + +function getPlatform(dependencies?: WslMirrorRuntimeDependencies): NodeJS.Platform { + return dependencies?.platform ?? process.platform +} + +function getHostHomeDir(dependencies?: WslMirrorRuntimeDependencies): string { + return dependencies?.effectiveHomeDir ?? getEffectiveHomeDir() +} + +function getNativeHomeDir(dependencies?: WslMirrorRuntimeDependencies): string { + return dependencies?.nativeHomeDir ?? resolveRuntimeEnvironment().nativeHomeDir +} + +function isWslExecutionRuntime(dependencies?: WslMirrorRuntimeDependencies): boolean { + return dependencies?.isWsl ?? resolveRuntimeEnvironment().isWsl +} + +function getPathModuleForPlatform( + platform: NodeJS.Platform +): typeof path.win32 | typeof path.posix { + return platform === 'win32' ? path.win32 : path.posix +} + +function normalizeInstanceNames( + instances: readonly string[] +): string[] { + return [...new Set(instances.map(instance => instance.trim()).filter(instance => instance.length > 0))] +} + +function normalizeConfiguredInstances( + pluginOptions?: PluginOptions +): string[] { + const configuredInstances = pluginOptions?.windows?.wsl2?.instances + const instanceList = configuredInstances == null + ? [] + : Array.isArray(configuredInstances) + ? configuredInstances + : [configuredInstances] + + return normalizeInstanceNames(instanceList) +} + +function buildWindowsWslHomePath( + instance: string, + linuxHomeDir: string +): string { + if (!linuxHomeDir.startsWith('/')) { + throw new Error(`WSL instance "${instance}" returned a non-absolute home path: "${linuxHomeDir}".`) + } + + const pathSegments = linuxHomeDir.split('/').filter(segment => segment.length > 0) + return path.win32.join(`\\\\wsl$\\${instance}`, ...pathSegments) +} + +function resolveMirroredRelativePathSegments( + sourcePath: string, + hostHomeDir: string, + platform: NodeJS.Platform +): string[] { + const pathModule = getPathModuleForPlatform(platform) + const normalizedHostHome = pathModule.normalize(hostHomeDir) + const normalizedSourcePath = pathModule.normalize(sourcePath) + const relativePath = pathModule.relative(normalizedHostHome, normalizedSourcePath) + + if ( + relativePath.length === 0 + || relativePath.startsWith('..') + || pathModule.isAbsolute(relativePath) + ) { + throw new Error( + `WSL mirror source "${sourcePath}" must stay under the host home directory "${hostHomeDir}".` + ) + } + + return relativePath.split(/[\\/]+/u).filter(segment => segment.length > 0) +} + +function decodeWslCliOutput( + value: unknown +): string { + if (typeof value === 'string') return value + if (!Buffer.isBuffer(value) || value.length === 0) return '' + + const hasUtf16LeBom = value.length >= 2 && value[0] === 0xFF && value[1] === 0xFE + const hasUtf16BeBom = value.length >= 2 && value[0] === 0xFE && value[1] === 0xFF + if (hasUtf16LeBom || hasUtf16BeBom) return value.toString('utf16le').replace(/^\uFEFF/u, '') + + const utf8Text = value.toString('utf8') + if (utf8Text.includes('\u0000')) return value.toString('utf16le').replace(/^\uFEFF/u, '') + return utf8Text +} + +function getSpawnOutputText( + value: unknown +): string { + return decodeWslCliOutput(value).replaceAll('\u0000', '') +} + +function getSpawnSyncErrorCode(result: SpawnSyncResult): string | undefined { + const {error} = result + if (error == null || typeof error !== 'object') return void 0 + return 'code' in error && typeof error.code === 'string' ? error.code : void 0 +} + +function getWslUnavailableReason(result: SpawnSyncResult): string | undefined { + const errorCode = getSpawnSyncErrorCode(result) + if (errorCode === 'ENOENT') return 'wsl.exe is not available on PATH.' + + const combinedOutput = [result.stderr, result.stdout] + .map(value => getSpawnOutputText(value).trim()) + .filter(value => value.length > 0) + .join('\n') + .toLowerCase() + + if (combinedOutput.length === 0) return void 0 + + const unavailableMarkers = [ + 'windows subsystem for linux has no installed distributions', + 'windows subsystem for linux has not been enabled', + 'the windows subsystem for linux optional component is not enabled', + 'wsl is not installed', + 'run \'wsl.exe --install\'', + 'run "wsl.exe --install"', + 'wslregisterdistribution failed with error: 0x8007019e' + ] + + return unavailableMarkers.some(marker => combinedOutput.includes(marker)) + ? combinedOutput + : void 0 +} + +export async function collectDeclaredWslMirrorFiles( + outputPlugins: readonly OutputPlugin[], + ctx: OutputWriteContext +): Promise { + const declarations = await Promise.all(outputPlugins.map(async plugin => { + if (plugin.declareWslMirrorFiles == null) return [] + return plugin.declareWslMirrorFiles(ctx) + })) + + const dedupedDeclarations = new Map() + for (const group of declarations) { + for (const declaration of group) { + dedupedDeclarations.set(declaration.sourcePath, declaration) + } + } + + return [...dedupedDeclarations.values()] +} + +function buildWindowsMirrorPathRuntimeContext( + hostHomeDir: string +): RuntimeEnvironmentContext { + return { + platform: 'win32', + isWsl: false, + nativeHomeDir: hostHomeDir, + effectiveHomeDir: hostHomeDir, + globalConfigCandidates: [], + windowsUsersRoot: '', + expandedEnv: { + HOME: hostHomeDir, + USERPROFILE: hostHomeDir + } + } +} + +function buildWslHostMirrorPathRuntimeContext( + hostHomeDir: string, + nativeHomeDir: string +): RuntimeEnvironmentContext { + return { + platform: 'linux', + isWsl: true, + nativeHomeDir, + effectiveHomeDir: hostHomeDir, + globalConfigCandidates: [], + windowsUsersRoot: '', + expandedEnv: { + HOME: hostHomeDir, + USERPROFILE: hostHomeDir + } + } +} + +function parseWslInstanceList( + rawOutput: string +): string[] { + const instanceList = rawOutput + .split(/\r?\n/u) + .map(line => line.replace(/^\*/u, '').trim()) + .filter(line => line.length > 0) + + return normalizeInstanceNames(instanceList) +} + +function discoverWslInstances( + logger: ILogger, + dependencies?: WslMirrorRuntimeDependencies +): string[] { + const spawnSyncImpl = getSpawnSync(dependencies) + const listResult = spawnSyncImpl('wsl.exe', ['--list', '--quiet'], { + shell: false, + windowsHide: true + }) + + const unavailableReason = getWslUnavailableReason(listResult) + if (unavailableReason != null) throw new WslUnavailableError(unavailableReason) + + if (listResult.status !== 0) { + const stderr = getSpawnOutputText(listResult.stderr).trim() + throw new Error( + `Failed to enumerate WSL instances. ${stderr.length > 0 ? stderr : 'wsl.exe returned a non-zero exit status.'}` + ) + } + + const discoveredInstances = parseWslInstanceList(getSpawnOutputText(listResult.stdout)) + logger.info('discovered wsl instances', { + instances: discoveredInstances + }) + return discoveredInstances +} + +function resolveConfiguredOrDiscoveredInstances( + pluginOptions: Required, + logger: ILogger, + dependencies?: WslMirrorRuntimeDependencies +): string[] { + const configuredInstances = normalizeConfiguredInstances(pluginOptions) + if (configuredInstances.length > 0) return configuredInstances + return discoverWslInstances(logger, dependencies) +} + +function resolveGeneratedWslMirrorSource( + declaration: OutputFileDeclaration, + hostHomeDir: string, + platform: NodeJS.Platform +): ResolvedWslMirrorSource | undefined { + if (declaration.scope !== 'global') return void 0 + + const pathModule = getPathModuleForPlatform(platform) + const sourcePath = pathModule.normalize(declaration.path) + let relativePathSegments: string[] + try { + relativePathSegments = resolveMirroredRelativePathSegments(sourcePath, hostHomeDir, platform) + } + catch { + return void 0 + } + + const [topLevelSegment] = relativePathSegments + + // Mirror home-style tool config roots only. Windows app-data trees such as + // AppData\Local\JetBrains\... stay Windows-only even though they live under the user profile. + if (!topLevelSegment?.startsWith('.')) return void 0 + + return { + kind: 'generated', + sourcePath, + relativePathSegments + } +} + +function collectGeneratedWslMirrorSources( + predeclaredOutputs: ReadonlyMap | undefined, + hostHomeDir: string, + platform: NodeJS.Platform +): readonly ResolvedWslMirrorSource[] { + if (predeclaredOutputs == null) return [] + + const dedupedSources = new Map() + for (const declarations of predeclaredOutputs.values()) { + for (const declaration of declarations) { + const resolvedSource = resolveGeneratedWslMirrorSource(declaration, hostHomeDir, platform) + if (resolvedSource == null) continue + dedupedSources.set(resolvedSource.sourcePath, resolvedSource) + } + } + + return [...dedupedSources.values()] +} + +function resolveDeclaredWslMirrorSource( + declaration: WslMirrorFileDeclaration, + pathRuntimeContext: RuntimeEnvironmentContext, + hostHomeDir: string, + platform: NodeJS.Platform +): ResolvedWslMirrorSource { + const pathModule = getPathModuleForPlatform(platform) + const sourcePath = pathModule.normalize(resolveUserPath(declaration.sourcePath, pathRuntimeContext)) + const relativePathSegments = resolveMirroredRelativePathSegments(sourcePath, hostHomeDir, platform) + + return { + kind: 'declared', + sourcePath, + relativePathSegments + } +} + +function combineWslMirrorSources( + mirrorDeclarations: readonly WslMirrorFileDeclaration[], + generatedMirrorSources: readonly ResolvedWslMirrorSource[], + pathRuntimeContext: RuntimeEnvironmentContext, + hostHomeDir: string, + platform: NodeJS.Platform +): {readonly sources: readonly ResolvedWslMirrorSource[], readonly errors: readonly string[]} { + const dedupedSources = new Map() + const errors: string[] = [] + + for (const declaration of mirrorDeclarations) { + try { + const resolvedSource = resolveDeclaredWslMirrorSource(declaration, pathRuntimeContext, hostHomeDir, platform) + dedupedSources.set(resolvedSource.sourcePath, resolvedSource) + } + catch (error) { + errors.push(error instanceof Error ? error.message : String(error)) + } + } + + for (const source of generatedMirrorSources) { + dedupedSources.set(source.sourcePath, source) + } + + return { + sources: [...dedupedSources.values()], + errors + } +} + +export function resolveWslInstanceTargets( + pluginOptions: Required, + logger: ILogger, + dependencies?: WslMirrorRuntimeDependencies +): ResolvedWslInstanceTarget[] { + if (getPlatform(dependencies) !== 'win32') return [] + + const configuredInstances = resolveConfiguredOrDiscoveredInstances(pluginOptions, logger, dependencies) + if (configuredInstances.length === 0) return [] + + const fsImpl = getFs(dependencies) + const spawnSyncImpl = getSpawnSync(dependencies) + const resolvedTargets: ResolvedWslInstanceTarget[] = [] + + for (const instance of configuredInstances) { + const probeResult = spawnSyncImpl('wsl.exe', ['-d', instance, 'sh', '-lc', 'printf %s "$HOME"'], { + shell: false, + windowsHide: true + }) + + const unavailableReason = getWslUnavailableReason(probeResult) + if (unavailableReason != null) throw new WslUnavailableError(unavailableReason) + + if (probeResult.status !== 0) { + const stderr = getSpawnOutputText(probeResult.stderr).trim() + throw new Error( + `Failed to probe WSL instance "${instance}". ${stderr.length > 0 ? stderr : 'wsl.exe returned a non-zero exit status.'}` + ) + } + + const linuxHomeDir = getSpawnOutputText(probeResult.stdout).trim() + if (linuxHomeDir.length === 0) throw new Error(`WSL instance "${instance}" returned an empty home directory.`) + + const windowsHomeDir = buildWindowsWslHomePath(instance, linuxHomeDir) + if (!fsImpl.existsSync(windowsHomeDir)) { + throw new Error( + `WSL instance "${instance}" home directory is unavailable at "${windowsHomeDir}".` + ) + } + + logger.info('resolved wsl instance home', { + instance, + linuxHomeDir, + windowsHomeDir + }) + + resolvedTargets.push({ + instance, + linuxHomeDir, + windowsHomeDir + }) + } + + return resolvedTargets +} + +function syncResolvedMirrorSourcesIntoCurrentWslHome( + sources: readonly ResolvedWslMirrorSource[], + ctx: OutputWriteContext, + dependencies?: WslMirrorRuntimeDependencies +): WslMirrorSyncResult { + const fsImpl = getFs(dependencies) + const nativeHomeDir = path.posix.normalize(getNativeHomeDir(dependencies)) + let mirroredFiles = 0 + const warnings: string[] = [] + const errors: string[] = [] + + for (const source of sources) { + if (source.kind === 'declared' && !fsImpl.existsSync(source.sourcePath)) { + const warningMessage = `Skipping missing WSL mirror source file: ${source.sourcePath}` + warnings.push(warningMessage) + ctx.logger.warn({ + code: 'WSL_MIRROR_SOURCE_MISSING', + title: 'WSL mirror source file is missing', + rootCause: [warningMessage], + exactFix: [ + 'Create the source file on the Windows host or remove the WSL mirror declaration before retrying tnmsc.' + ] + }) + continue + } + + const targetPath = path.posix.join(nativeHomeDir, ...source.relativePathSegments) + try { + if (ctx.dryRun === true) { + ctx.logger.info('would mirror host config into wsl runtime home', { + sourcePath: source.sourcePath, + targetPath, + dryRun: true + }) + } else { + const content = fsImpl.readFileSync(source.sourcePath) + fsImpl.mkdirSync(path.posix.dirname(targetPath), {recursive: true}) + fsImpl.writeFileSync(targetPath, content) + ctx.logger.info('mirrored host config into wsl runtime home', { + sourcePath: source.sourcePath, + targetPath + }) + } + + mirroredFiles += 1 + } + catch (error) { + errors.push( + `Failed to mirror "${source.sourcePath}" into the current WSL home at "${targetPath}": ${error instanceof Error ? error.message : String(error)}` + ) + } + } + + return { + mirroredFiles, + warnings, + errors + } +} + +export async function syncWindowsConfigIntoWsl( + outputPlugins: readonly OutputPlugin[], + ctx: OutputWriteContext, + dependencies?: WslMirrorRuntimeDependencies, + predeclaredOutputs?: ReadonlyMap +): Promise { + const platform = getPlatform(dependencies) + const wslRuntime = platform === 'linux' && isWslExecutionRuntime(dependencies) + if (platform !== 'win32' && !wslRuntime) { + return { + mirroredFiles: 0, + warnings: [], + errors: [] + } + } + + const hostHomeDir = wslRuntime + ? path.posix.normalize(getHostHomeDir(dependencies)) + : path.win32.normalize(getHostHomeDir(dependencies)) + const mirrorDeclarations = await collectDeclaredWslMirrorFiles(outputPlugins, ctx) + const generatedMirrorSources = collectGeneratedWslMirrorSources(predeclaredOutputs, hostHomeDir, platform) + if (mirrorDeclarations.length === 0 && generatedMirrorSources.length === 0) { + return { + mirroredFiles: 0, + warnings: [], + errors: [] + } + } + + const pluginOptions = (ctx.pluginOptions ?? {}) as Required + const nativeHomeDir = wslRuntime ? path.posix.normalize(getNativeHomeDir(dependencies)) : void 0 + const pathRuntimeContext = wslRuntime + ? buildWslHostMirrorPathRuntimeContext(hostHomeDir, nativeHomeDir ?? hostHomeDir) + : buildWindowsMirrorPathRuntimeContext(hostHomeDir) + const resolvedMirrorSources = combineWslMirrorSources( + mirrorDeclarations, + generatedMirrorSources, + pathRuntimeContext, + hostHomeDir, + platform + ) + + if (wslRuntime) { + if (resolvedMirrorSources.sources.length === 0 || nativeHomeDir == null || hostHomeDir === nativeHomeDir) { + return { + mirroredFiles: 0, + warnings: [], + errors: [...resolvedMirrorSources.errors] + } + } + + const localMirrorResult = syncResolvedMirrorSourcesIntoCurrentWslHome( + resolvedMirrorSources.sources, + ctx, + dependencies + ) + + return { + mirroredFiles: localMirrorResult.mirroredFiles, + warnings: [...localMirrorResult.warnings], + errors: [...resolvedMirrorSources.errors, ...localMirrorResult.errors] + } + } + + let resolvedTargets: ResolvedWslInstanceTarget[] + try { + resolvedTargets = resolveWslInstanceTargets(pluginOptions, ctx.logger, dependencies) + } + catch (error) { + if (error instanceof WslUnavailableError) { + ctx.logger.info('wsl is unavailable, skipping WSL mirror sync', { + reason: error.message + }) + return { + mirroredFiles: 0, + warnings: [], + errors: [] + } + } + + return { + mirroredFiles: 0, + warnings: [], + errors: [error instanceof Error ? error.message : String(error)] + } + } + + if (resolvedTargets.length === 0 || resolvedMirrorSources.sources.length === 0) { + return { + mirroredFiles: 0, + warnings: [], + errors: [...resolvedMirrorSources.errors] + } + } + + const fsImpl = getFs(dependencies) + let mirroredFiles = 0 + const warnings: string[] = [] + const errors: string[] = [...resolvedMirrorSources.errors] + + for (const declaration of resolvedMirrorSources.sources) { + if (declaration.kind === 'declared' && !fsImpl.existsSync(declaration.sourcePath)) { + const warningMessage = `Skipping missing WSL mirror source file: ${declaration.sourcePath}` + warnings.push(warningMessage) + ctx.logger.warn({ + code: 'WSL_MIRROR_SOURCE_MISSING', + title: 'WSL mirror source file is missing', + rootCause: [warningMessage], + exactFix: [ + 'Create the source file on the Windows host or remove the WSL mirror declaration before retrying tnmsc.' + ] + }) + continue + } + + const {relativePathSegments, sourcePath} = declaration + + for (const resolvedTarget of resolvedTargets) { + const targetPath = path.win32.join(resolvedTarget.windowsHomeDir, ...relativePathSegments) + + try { + if (ctx.dryRun === true) { + ctx.logger.info('would mirror windows config into wsl', { + instance: resolvedTarget.instance, + sourcePath, + targetPath, + dryRun: true + }) + } else { + const content = fsImpl.readFileSync(sourcePath) + fsImpl.mkdirSync(path.win32.dirname(targetPath), {recursive: true}) + fsImpl.writeFileSync(targetPath, content) + ctx.logger.info('mirrored windows config into wsl', { + instance: resolvedTarget.instance, + sourcePath, + targetPath + }) + } + + mirroredFiles += 1 + } + catch (error) { + errors.push( + `Failed to mirror "${sourcePath}" into WSL instance "${resolvedTarget.instance}" at "${targetPath}": ${error instanceof Error ? error.message : String(error)}` + ) + } + } + } + + return { + mirroredFiles, + warnings, + errors + } +} From 79468908e1f08cc63b1b033ff02c168d3d81961a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:32:33 +0800 Subject: [PATCH 12/27] feat: add script to ensure SDK build and sync SDK distribution files --- cli/scripts/benchmark-cleanup.ts | 147 ---------------------------- cli/scripts/cleanup-native-smoke.ts | 145 --------------------------- cli/scripts/ensure-sdk-build.ts | 38 +++++++ cli/scripts/finalize-bundle.ts | 143 --------------------------- cli/scripts/generate-schema.ts | 5 - cli/scripts/sync-sdk-dist.ts | 37 +++++++ 6 files changed, 75 insertions(+), 440 deletions(-) delete mode 100644 cli/scripts/benchmark-cleanup.ts delete mode 100644 cli/scripts/cleanup-native-smoke.ts create mode 100644 cli/scripts/ensure-sdk-build.ts delete mode 100644 cli/scripts/finalize-bundle.ts delete mode 100644 cli/scripts/generate-schema.ts create mode 100644 cli/scripts/sync-sdk-dist.ts diff --git a/cli/scripts/benchmark-cleanup.ts b/cli/scripts/benchmark-cleanup.ts deleted file mode 100644 index 9c0cb13f..00000000 --- a/cli/scripts/benchmark-cleanup.ts +++ /dev/null @@ -1,147 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../src/plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import {performance} from 'node:perf_hooks' -import glob from 'fast-glob' - -process.env['TNMSC_FORCE_NATIVE_BINDING'] = '1' -delete process.env['VITEST'] -delete process.env['VITEST_WORKER_ID'] - -const cleanupModule = await import('../src/commands/CleanupUtils') -const pluginCore = await import('../src/plugins/plugin-core') - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createCleanContext(workspaceDir: string): OutputCleanContext { - return { - logger: createMockLogger(), - fs, - path, - glob, - collectedOutputContext: { - workspace: { - directory: { - pathKind: pluginCore.FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: Array.from({length: 40}, (_, index) => ({ - dirFromWorkspacePath: { - pathKind: pluginCore.FilePathKind.Relative, - path: `project-${index}`, - basePath: workspaceDir, - getDirectoryName: () => `project-${index}`, - getAbsolutePath: () => path.join(workspaceDir, `project-${index}`) - } - })) - }, - aindexDir: path.join(workspaceDir, 'aindex') - } - } as OutputCleanContext -} - -function createBenchmarkPlugin(workspaceDir: string): OutputPlugin { - return { - type: pluginCore.PluginKind.Output, - name: 'BenchmarkOutputPlugin', - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return Array.from({length: 40}, (_, projectIndex) => ([ - {path: path.join(workspaceDir, `project-${projectIndex}`, 'AGENTS.md'), source: {}}, - {path: path.join(workspaceDir, `project-${projectIndex}`, 'commands', 'AGENTS.md'), source: {}} - ])).flat() - }, - async declareCleanupPaths(): Promise { - return { - delete: [{ - kind: 'glob', - path: path.join(workspaceDir, '.codex', 'skills', '*'), - excludeBasenames: ['.system'] - }, { - kind: 'glob', - path: path.join(workspaceDir, '.claude', '**', 'CLAUDE.md') - }], - protect: [{ - kind: 'directory', - path: path.join(workspaceDir, '.codex', 'skills', '.system'), - protectionMode: 'recursive' - }] - } - }, - async convertContent() { - return 'benchmark' - } - } -} - -async function measure(label: string, iterations: number, run: () => Promise): Promise { - const start = performance.now() - for (let index = 0; index < iterations; index += 1) { - await run() - } - const total = performance.now() - start - const average = total / iterations - process.stdout.write(`${label}: total=${total.toFixed(2)}ms avg=${average.toFixed(2)}ms\n`) - return average -} - -async function main(): Promise { - if (!cleanupModule.hasNativeCleanupBinding()) { - throw new Error('Native cleanup binding is unavailable. Build the CLI NAPI module first.') - } - - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-benchmark-cleanup-')) - const workspaceDir = path.join(tempDir, 'workspace') - - try { - for (let projectIndex = 0; projectIndex < 40; projectIndex += 1) { - const rootFile = path.join(workspaceDir, `project-${projectIndex}`, 'AGENTS.md') - const childFile = path.join(workspaceDir, `project-${projectIndex}`, 'commands', 'AGENTS.md') - fs.mkdirSync(path.dirname(childFile), {recursive: true}) - fs.writeFileSync(rootFile, '# root', 'utf8') - fs.writeFileSync(childFile, '# child', 'utf8') - } - - const skillsDir = path.join(workspaceDir, '.codex', 'skills') - fs.mkdirSync(path.join(skillsDir, '.system'), {recursive: true}) - for (let index = 0; index < 80; index += 1) { - const skillDir = path.join(skillsDir, `legacy-${index}`) - fs.mkdirSync(skillDir, {recursive: true}) - fs.writeFileSync(path.join(skillDir, 'SKILL.md'), '# stale', 'utf8') - } - - for (let index = 0; index < 40; index += 1) { - const claudeFile = path.join(workspaceDir, '.claude', `project-${index}`, 'CLAUDE.md') - fs.mkdirSync(path.dirname(claudeFile), {recursive: true}) - fs.writeFileSync(claudeFile, '# claude', 'utf8') - } - - const plugin = createBenchmarkPlugin(workspaceDir) - const cleanCtx = createCleanContext(workspaceDir) - const iterations = 25 - - process.stdout.write(`cleanup benchmark iterations=${iterations}\n`) - await measure('native-plan', iterations, async () => { - await cleanupModule.collectDeletionTargets([plugin], cleanCtx) - }) - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } -} - -await main() diff --git a/cli/scripts/cleanup-native-smoke.ts b/cli/scripts/cleanup-native-smoke.ts deleted file mode 100644 index 1e9c84b6..00000000 --- a/cli/scripts/cleanup-native-smoke.ts +++ /dev/null @@ -1,145 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../src/plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' - -process.env['TNMSC_FORCE_NATIVE_BINDING'] = '1' -delete process.env['VITEST'] -delete process.env['VITEST_WORKER_ID'] - -const cleanupModule = await import('../src/commands/CleanupUtils') -const pluginCore = await import('../src/plugins/plugin-core') - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createCleanContext(workspaceDir: string): OutputCleanContext { - return { - logger: createMockLogger(), - fs, - path, - glob, - collectedOutputContext: { - workspace: { - directory: { - pathKind: pluginCore.FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [{ - dirFromWorkspacePath: { - pathKind: pluginCore.FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceDir, 'project-a') - } - }] - }, - aindexDir: path.join(workspaceDir, 'aindex') - } - } as OutputCleanContext -} - -function createSmokePlugin(workspaceDir: string): OutputPlugin { - return { - type: pluginCore.PluginKind.Output, - name: 'SmokeOutputPlugin', - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return [ - {path: path.join(workspaceDir, 'project-a', 'AGENTS.md'), source: {}}, - {path: path.join(workspaceDir, 'project-a', 'commands', 'AGENTS.md'), source: {}} - ] - }, - async declareCleanupPaths(): Promise { - return { - delete: [{ - kind: 'glob', - path: path.join(workspaceDir, '.codex', 'skills', '*'), - excludeBasenames: ['.system'] - }] - } - }, - async convertContent() { - return 'smoke' - } - } -} - -async function main(): Promise { - if (!cleanupModule.hasNativeCleanupBinding()) { - throw new Error('Native cleanup binding is unavailable. Build the CLI NAPI module first.') - } - - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-native-cleanup-smoke-')) - const workspaceDir = path.join(tempDir, 'workspace') - const legacySkillDir = path.join(workspaceDir, '.codex', 'skills', 'legacy') - const preservedSkillDir = path.join(workspaceDir, '.codex', 'skills', '.system') - const rootOutput = path.join(workspaceDir, 'project-a', 'AGENTS.md') - const childOutput = path.join(workspaceDir, 'project-a', 'commands', 'AGENTS.md') - - fs.mkdirSync(path.dirname(rootOutput), {recursive: true}) - fs.mkdirSync(path.dirname(childOutput), {recursive: true}) - fs.mkdirSync(legacySkillDir, {recursive: true}) - fs.mkdirSync(preservedSkillDir, {recursive: true}) - fs.writeFileSync(rootOutput, '# root', 'utf8') - fs.writeFileSync(childOutput, '# child', 'utf8') - fs.writeFileSync(path.join(legacySkillDir, 'SKILL.md'), '# stale', 'utf8') - fs.writeFileSync(path.join(preservedSkillDir, 'SKILL.md'), '# keep', 'utf8') - - try { - const plugin = createSmokePlugin(workspaceDir) - const cleanCtx = createCleanContext(workspaceDir) - - const nativePlan = await cleanupModule.collectDeletionTargets([plugin], cleanCtx) - expectSetEqual(nativePlan.filesToDelete, [rootOutput, childOutput], 'native cleanup plan files') - expectSetEqual(nativePlan.dirsToDelete, [ - legacySkillDir, - path.join(workspaceDir, 'project-a', 'commands'), - path.join(workspaceDir, 'project-a') - ], 'native cleanup plan directories') - if (nativePlan.violations.length > 0 || nativePlan.conflicts.length > 0) { - throw new Error(`Unexpected native cleanup plan: ${JSON.stringify(nativePlan, null, 2)}`) - } - - const result = await cleanupModule.performCleanup([plugin], cleanCtx, createMockLogger()) - if (result.deletedFiles !== 2 || result.deletedDirs !== 3 || result.errors.length > 0) { - throw new Error(`Unexpected native cleanup result: ${JSON.stringify(result, null, 2)}`) - } - - if (fs.existsSync(rootOutput) || fs.existsSync(childOutput) || fs.existsSync(legacySkillDir)) { - throw new Error('Native cleanup did not remove the expected outputs') - } - if (!fs.existsSync(preservedSkillDir)) { - throw new Error('Native cleanup removed the preserved .system skill directory') - } - - process.stdout.write('cleanup-native-smoke: ok\n') - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } -} - -function expectSetEqual(actual: readonly string[], expected: readonly string[], label: string): void { - const actualSorted = [...actual].sort() - const expectedSorted = [...expected].sort() - if (JSON.stringify(actualSorted) !== JSON.stringify(expectedSorted)) { - throw new Error(`Unexpected ${label}: ${JSON.stringify(actualSorted)} !== ${JSON.stringify(expectedSorted)}`) - } -} - -await main() diff --git a/cli/scripts/ensure-sdk-build.ts b/cli/scripts/ensure-sdk-build.ts new file mode 100644 index 00000000..84aebe5a --- /dev/null +++ b/cli/scripts/ensure-sdk-build.ts @@ -0,0 +1,38 @@ +#!/usr/bin/env tsx + +import {spawnSync} from 'node:child_process' +import {existsSync} from 'node:fs' +import {dirname, resolve} from 'node:path' +import {fileURLToPath} from 'node:url' + +const __dirname = dirname(fileURLToPath(import.meta.url)) +const cliDir = resolve(__dirname, '..') +const workspaceDir = resolve(cliDir, '..') +const sdkDistDir = resolve(cliDir, '../sdk/dist') + +const REQUIRED_SDK_OUTPUTS = [ + 'index.mjs', + 'index.d.mts', + 'globals.mjs', + 'globals.d.mts', + 'plugin-runtime.mjs', + 'script-runtime-worker.mjs', + 'tnmsc.schema.json' +] as const + +function hasRequiredSdkOutputs(): boolean { + return REQUIRED_SDK_OUTPUTS.every(fileName => existsSync(resolve(sdkDistDir, fileName))) +} + +if (!hasRequiredSdkOutputs()) { + const result = spawnSync( + 'pnpm', + ['-F', '@truenine/memory-sync-sdk', 'run', 'build'], + { + cwd: workspaceDir, + stdio: 'inherit' + } + ) + + process.exit(result.status ?? 1) +} diff --git a/cli/scripts/finalize-bundle.ts b/cli/scripts/finalize-bundle.ts deleted file mode 100644 index d53fa142..00000000 --- a/cli/scripts/finalize-bundle.ts +++ /dev/null @@ -1,143 +0,0 @@ -import {spawnSync} from 'node:child_process' -import {copyFileSync, existsSync, mkdtempSync, readdirSync, rmSync, writeFileSync} from 'node:fs' -import {tmpdir} from 'node:os' -import {dirname, join, resolve} from 'node:path' -import {fileURLToPath, pathToFileURL} from 'node:url' - -const scriptDir = dirname(fileURLToPath(import.meta.url)) -const cliDir = resolve(scriptDir, '..') -const distDir = resolve(cliDir, 'dist') -const indexEntryPath = resolve(distDir, 'index.mjs') -const bundledJitiBabelRuntimeSourcePath = resolve(cliDir, 'node_modules', 'jiti', 'dist', 'babel.cjs') -const bundledJitiBabelRuntimeTargetPath = resolve(distDir, 'babel.cjs') - -function getCombinedOutput(stdout?: string | null, stderr?: string | null): string { - return `${stdout ?? ''}${stderr ?? ''}`.trim() -} - -function runNodeProcess( - args: readonly string[], - options?: { - readonly env?: NodeJS.ProcessEnv - } -) { - return spawnSync(process.execPath, [...args], { - cwd: cliDir, - encoding: 'utf8', - ...options?.env != null && {env: options.env} - }) -} - -function assertProcessSucceeded( - result: ReturnType, - lines: readonly string[] -): void { - if (result.error != null) { - throw result.error - } - - if (result.status === 0) { - return - } - - const combinedOutput = getCombinedOutput(result.stdout, result.stderr) - throw new Error([ - ...lines, - combinedOutput.length === 0 ? 'No output captured.' : combinedOutput - ].join('\n')) -} - -function withTempDir(prefix: string, callback: (tempDir: string) => T): T { - const tempDir = mkdtempSync(join(tmpdir(), prefix)) - - try { - return callback(tempDir) - } - finally { - rmSync(tempDir, {recursive: true, force: true}) - } -} - -function ensureIndexBundleExists(): void { - if (existsSync(indexEntryPath)) return - throw new Error(`Expected bundled CLI entry at "${indexEntryPath}" before finalizing bundle assets.`) -} - -function findBundledJitiChunkPath(): string | undefined { - const bundledJitiChunkName = readdirSync(distDir) - .find(fileName => /^jiti-.*\.mjs$/u.test(fileName)) - - return bundledJitiChunkName == null ? void 0 : resolve(distDir, bundledJitiChunkName) -} - -function ensureBundledJitiRuntimeAssets(): string | undefined { - const bundledJitiChunkPath = findBundledJitiChunkPath() - if (bundledJitiChunkPath == null) return void 0 - - if (!existsSync(bundledJitiBabelRuntimeSourcePath)) { - throw new Error( - `Bundled jiti chunk "${bundledJitiChunkPath}" requires "${bundledJitiBabelRuntimeSourcePath}", but it does not exist.` - ) - } - - copyFileSync(bundledJitiBabelRuntimeSourcePath, bundledJitiBabelRuntimeTargetPath) - return bundledJitiChunkPath -} - -function smokeTestBundledJitiTransform(bundledJitiChunkPath: string | undefined): void { - if (bundledJitiChunkPath == null) return - - withTempDir('tnmsc-bundled-jiti-', tempDir => { - const probeModulePath = join(tempDir, 'probe.ts') - const probeRunnerPath = join(tempDir, 'probe-runner.mjs') - - writeFileSync(probeModulePath, 'export default {ok: true}\n', 'utf8') - writeFileSync(probeRunnerPath, [ - "import {pathToFileURL} from 'node:url'", - '', - 'const [, , bundledJitiChunkPathArg, probeModulePathArg] = process.argv', - '', - 'const {createJiti} = await import(pathToFileURL(bundledJitiChunkPathArg).href)', - 'const runtime = createJiti(import.meta.url, {', - ' fsCache: false,', - ' moduleCache: false,', - ' interopDefault: false', - '})', - 'const loaded = await runtime.import(probeModulePathArg)', - '', - 'if (loaded.default?.ok !== true) {', - " throw new Error('Bundled jiti smoke test loaded an unexpected module shape.')", - '}', - '' - ].join('\n'), 'utf8') - - const smokeTest = runNodeProcess([probeRunnerPath, bundledJitiChunkPath, probeModulePath]) - assertProcessSucceeded(smokeTest, [ - `Bundled jiti chunk "${pathToFileURL(bundledJitiChunkPath).href}" failed the transform smoke test.` - ]) - }) -} - -function smokeTestCliEntry(): void { - withTempDir('tnmsc-index-entry-home-', isolatedHomeDir => { - const smokeTest = runNodeProcess([indexEntryPath, '--version'], { - env: { - ...process.env, - HOME: isolatedHomeDir, - USERPROFILE: isolatedHomeDir - } - }) - - assertProcessSucceeded(smokeTest, [ - `Bundled CLI entry "${indexEntryPath}" failed the runtime smoke test.`, - `Exit code: ${smokeTest.status ?? 'unknown'}` - ]) - }) -} - -ensureIndexBundleExists() -const bundledJitiChunkPath = ensureBundledJitiRuntimeAssets() -smokeTestBundledJitiTransform(bundledJitiChunkPath) -smokeTestCliEntry() - -console.log(`Finalized bundled CLI assets for ${indexEntryPath}`) diff --git a/cli/scripts/generate-schema.ts b/cli/scripts/generate-schema.ts deleted file mode 100644 index b8c124dc..00000000 --- a/cli/scripts/generate-schema.ts +++ /dev/null @@ -1,5 +0,0 @@ -import {writeFileSync} from 'node:fs' -import {TNMSC_JSON_SCHEMA} from '../src/schema.ts' - -writeFileSync('./dist/tnmsc.schema.json', `${JSON.stringify(TNMSC_JSON_SCHEMA, null, 2)}\n`, 'utf8') -console.log('Schema generated successfully!') diff --git a/cli/scripts/sync-sdk-dist.ts b/cli/scripts/sync-sdk-dist.ts new file mode 100644 index 00000000..d8610efe --- /dev/null +++ b/cli/scripts/sync-sdk-dist.ts @@ -0,0 +1,37 @@ +#!/usr/bin/env tsx + +import {cpSync, existsSync, mkdirSync, readdirSync, rmSync} from 'node:fs' +import {dirname, join, resolve} from 'node:path' +import {fileURLToPath} from 'node:url' + +const __dirname = dirname(fileURLToPath(import.meta.url)) +const cliDir = resolve(__dirname, '..') +const sdkDistDir = resolve(cliDir, '../sdk/dist') +const cliDistDir = resolve(cliDir, 'dist') + +const EXACT_FILES = new Set([ + 'babel.cjs', + 'plugin-runtime.mjs', + 'script-runtime-worker.mjs', + 'tnmsc.schema.json' +]) + +function shouldCopy(fileName: string): boolean { + return EXACT_FILES.has(fileName) || /^jiti-.*\.mjs$/u.test(fileName) +} + +if (!existsSync(sdkDistDir)) { + throw new Error(`sdk dist directory is missing: ${sdkDistDir}`) +} + +mkdirSync(cliDistDir, {recursive: true}) + +for (const fileName of readdirSync(cliDistDir)) { + if (!shouldCopy(fileName)) continue + rmSync(join(cliDistDir, fileName), {force: true, recursive: true}) +} + +for (const fileName of readdirSync(sdkDistDir)) { + if (!shouldCopy(fileName)) continue + cpSync(join(sdkDistDir, fileName), join(cliDistDir, fileName), {recursive: true}) +} From ec8d99ce0b5be1adae8e247c6ef7a8664a636eb1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Mon, 30 Mar 2026 22:33:00 +0800 Subject: [PATCH 13/27] refactor: remove WSL mirror sync functionality from cli/src/wsl-mirror-sync.ts --- cli/src/Aindex.ts | 161 -- cli/src/ConfigLoader.test.ts | 67 - cli/src/ConfigLoader.ts | 473 ---- cli/src/PluginPipeline.test.ts | 60 - cli/src/PluginPipeline.ts | 101 - cli/src/ProtectedDeletionGuard.ts | 612 ----- cli/src/aindex-config/AindexProjectConfig.ts | 29 - .../AindexProjectConfigLoader.ts | 88 - cli/src/aindex-config/index.ts | 2 - cli/src/aindex-project-series.ts | 72 - cli/src/bridge/mod.rs | 3 - cli/src/bridge/node.rs | 549 ---- cli/src/cleanup/delete-targets.ts | 71 - cli/src/cleanup/empty-directories.ts | 114 - cli/src/cli-runtime.test.ts | 67 - cli/src/cli-runtime.ts | 106 - cli/src/cli.rs | 112 - cli/src/commands/CleanCommand.ts | 34 - cli/src/commands/CleanupUtils.adapter.test.ts | 156 -- cli/src/commands/CleanupUtils.test.ts | 782 ------ cli/src/commands/CleanupUtils.ts | 462 ---- cli/src/commands/Command.ts | 95 - cli/src/commands/CommandFactory.ts | 29 - cli/src/commands/CommandRegistry.ts | 43 - cli/src/commands/CommandUtils.ts | 70 - cli/src/commands/ConfigCommand.ts | 237 -- cli/src/commands/ConfigShowCommand.ts | 48 - cli/src/commands/DryRunCleanCommand.ts | 74 - cli/src/commands/DryRunOutputCommand.ts | 51 - cli/src/commands/ExecuteCommand.ts | 79 - cli/src/commands/HelpCommand.ts | 77 - cli/src/commands/InitCommand.test.ts | 78 - cli/src/commands/InitCommand.ts | 36 - cli/src/commands/JsonOutputCommand.ts | 56 - cli/src/commands/PluginsCommand.ts | 54 - .../ProtectedDeletionCommands.test.ts | 277 -- cli/src/commands/SetCommand.ts | 0 cli/src/commands/UnknownCommand.ts | 34 - cli/src/commands/VersionCommand.ts | 29 - cli/src/commands/bridge.rs | 23 - cli/src/commands/config_cmd.rs | 108 - cli/src/commands/config_show.rs | 44 - .../commands/factories/CleanCommandFactory.ts | 20 - .../factories/ConfigCommandFactory.ts | 29 - .../factories/DryRunCommandFactory.ts | 19 - .../factories/ExecuteCommandFactory.ts | 20 - .../commands/factories/HelpCommandFactory.ts | 22 - .../commands/factories/InitCommandFactory.ts | 15 - .../factories/PluginsCommandFactory.ts | 19 - .../factories/UnknownCommandFactory.ts | 22 - .../factories/VersionCommandFactory.ts | 22 - cli/src/commands/help.rs | 26 - cli/src/commands/mod.rs | 5 - cli/src/commands/version.rs | 6 - cli/src/config.outputScopes.test.ts | 45 - cli/src/config.plugins-fast-path.test.ts | 50 - cli/src/config.test.ts | 173 -- cli/src/config.ts | 475 ---- cli/src/core/cleanup.rs | 2309 ----------------- cli/src/core/config/mod.rs | 1513 ----------- cli/src/core/config/series_filter.rs | 228 -- cli/src/core/desk-paths.ts | 179 -- cli/src/core/desk_paths.rs | 623 ----- cli/src/core/input_plugins.rs | 9 - cli/src/core/mod.rs | 5 - cli/src/core/native-binding.ts | 63 - cli/src/core/plugin_shared.rs | 623 ----- cli/src/diagnostic_helpers.rs | 32 - cli/src/diagnostics.test.ts | 54 - cli/src/diagnostics.ts | 415 --- cli/src/globals.ts | 2 +- cli/src/index.test.ts | 19 +- cli/src/index.ts | 17 +- cli/src/inputs/AbstractInputCapability.ts | 186 -- cli/src/inputs/effect-md-cleanup.ts | 166 -- cli/src/inputs/effect-orphan-cleanup.test.ts | 249 -- cli/src/inputs/effect-orphan-cleanup.ts | 308 --- cli/src/inputs/effect-skill-sync.test.ts | 115 - cli/src/inputs/effect-skill-sync.ts | 181 -- cli/src/inputs/index.ts | 59 - .../input-agentskills-export-fallback.test.ts | 80 - cli/src/inputs/input-agentskills-types.ts | 10 - cli/src/inputs/input-agentskills.test.ts | 179 -- cli/src/inputs/input-agentskills.ts | 836 ------ cli/src/inputs/input-aindex.test.ts | 187 -- cli/src/inputs/input-aindex.ts | 270 -- cli/src/inputs/input-command.test.ts | 148 -- cli/src/inputs/input-command.ts | 152 -- cli/src/inputs/input-editorconfig.ts | 23 - cli/src/inputs/input-git-exclude.ts | 32 - cli/src/inputs/input-gitignore.ts | 32 - cli/src/inputs/input-global-memory.ts | 136 - cli/src/inputs/input-jetbrains-config.ts | 31 - cli/src/inputs/input-project-prompt.test.ts | 176 -- cli/src/inputs/input-project-prompt.ts | 435 ---- cli/src/inputs/input-public-config.test.ts | 450 ---- cli/src/inputs/input-readme.test.ts | 49 - cli/src/inputs/input-readme.ts | 270 -- cli/src/inputs/input-rule.test.ts | 93 - cli/src/inputs/input-rule.ts | 103 - cli/src/inputs/input-shared-ignore.ts | 35 - cli/src/inputs/input-subagent.test.ts | 224 -- cli/src/inputs/input-subagent.ts | 179 -- cli/src/inputs/input-vscode-config.ts | 27 - cli/src/inputs/input-workspace.ts | 28 - cli/src/inputs/input-zed-config.ts | 23 - cli/src/inputs/runtime.ts | 172 -- cli/src/lib.rs | 546 ---- cli/src/main.rs | 7 +- cli/src/pipeline/CliArgumentParser.test.ts | 9 - cli/src/pipeline/CliArgumentParser.ts | 265 -- cli/src/pipeline/ContextMerger.ts | 207 -- cli/src/pipeline/DependencyResolver.ts | 136 - cli/src/pipeline/OutputRuntimeTargets.ts | 57 - cli/src/plugin-runtime.ts | 128 - cli/src/plugin.config.ts | 58 - cli/src/plugins/AbstractOutputPlugin.test.ts | 122 - cli/src/plugins/AgentsOutputPlugin.test.ts | 124 - cli/src/plugins/AgentsOutputPlugin.ts | 127 - cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts | 123 - cli/src/plugins/CodexCLIOutputPlugin.test.ts | 364 --- cli/src/plugins/CodexCLIOutputPlugin.ts | 124 - cli/src/plugins/CursorOutputPlugin.test.ts | 351 --- cli/src/plugins/CursorOutputPlugin.ts | 561 ---- cli/src/plugins/DroidCLIOutputPlugin.ts | 56 - cli/src/plugins/EditorConfigOutputPlugin.ts | 59 - cli/src/plugins/GeminiCLIOutputPlugin.ts | 57 - .../plugins/GenericSkillsOutputPlugin.test.ts | 192 -- cli/src/plugins/GenericSkillsOutputPlugin.ts | 245 -- cli/src/plugins/GitExcludeOutputPlugin.ts | 90 - .../JetBrainsAIAssistantCodexOutputPlugin.ts | 366 --- ...JetBrainsIDECodeStyleConfigOutputPlugin.ts | 68 - .../plugins/OpencodeCLIOutputPlugin.test.ts | 118 - cli/src/plugins/OpencodeCLIOutputPlugin.ts | 499 ---- cli/src/plugins/PromptMarkdownCleanup.test.ts | 259 -- .../QoderIDEPluginOutputPlugin.test.ts | 396 --- cli/src/plugins/QoderIDEPluginOutputPlugin.ts | 419 --- .../plugins/ReadmeMdConfigFileOutputPlugin.ts | 72 - cli/src/plugins/TraeCNIDEOutputPlugin.ts | 60 - cli/src/plugins/TraeIDEOutputPlugin.test.ts | 125 - cli/src/plugins/TraeIDEOutputPlugin.ts | 295 --- .../VisualStudioCodeIDEConfigOutputPlugin.ts | 65 - cli/src/plugins/WarpIDEOutputPlugin.test.ts | 75 - cli/src/plugins/WarpIDEOutputPlugin.ts | 110 - cli/src/plugins/WindsurfOutputPlugin.test.ts | 212 -- cli/src/plugins/WindsurfOutputPlugin.ts | 278 -- cli/src/plugins/WslMirrorDeclarations.test.ts | 25 - cli/src/plugins/ZedIDEConfigOutputPlugin.ts | 64 - cli/src/plugins/desk-paths.test.ts | 141 - cli/src/plugins/desk-paths.ts | 1 - cli/src/plugins/ide-config-output.test.ts | 238 -- cli/src/plugins/plugin-agentskills-compact.ts | 3 - cli/src/plugins/plugin-agentsmd.ts | 3 - cli/src/plugins/plugin-claude-code-cli.ts | 3 - cli/src/plugins/plugin-core.ts | 172 -- .../AbstractOutputPlugin.frontmatter.test.ts | 204 -- .../AbstractOutputPlugin.subagents.test.ts | 114 - .../plugin-core/AbstractOutputPlugin.ts | 1424 ---------- cli/src/plugins/plugin-core/AbstractPlugin.ts | 26 - .../plugin-core/AindexConfigDefaults.ts | 123 - cli/src/plugins/plugin-core/AindexTypes.ts | 367 --- .../plugins/plugin-core/ConfigTypes.schema.ts | 188 -- .../plugin-core/DistPromptGuards.test.ts | 22 - .../plugins/plugin-core/DistPromptGuards.ts | 68 - .../plugin-core/ExportMetadataTypes.ts | 278 -- .../plugin-core/GlobalScopeCollector.ts | 231 -- cli/src/plugins/plugin-core/InputTypes.ts | 418 --- .../plugin-core/LocalizedPromptReader.ts | 736 ------ .../plugins/plugin-core/McpConfigManager.ts | 251 -- cli/src/plugins/plugin-core/OutputTypes.ts | 145 -- .../plugin-core/PromptArtifactCache.test.ts | 203 -- .../plugin-core/PromptArtifactCache.ts | 317 --- .../PromptCompilerDiagnostics.test.ts | 47 - .../plugin-core/PromptCompilerDiagnostics.ts | 65 - cli/src/plugins/plugin-core/PromptIdentity.ts | 59 - cli/src/plugins/plugin-core/PromptTypes.ts | 184 -- cli/src/plugins/plugin-core/RegistryWriter.ts | 179 -- cli/src/plugins/plugin-core/constants.ts | 113 - cli/src/plugins/plugin-core/enums.ts | 53 - cli/src/plugins/plugin-core/filters.ts | 261 -- .../plugin.outputScopes.validation.test.ts | 182 -- cli/src/plugins/plugin-core/plugin.ts | 541 ---- .../plugins/plugin-core/scopePolicy.test.ts | 50 - cli/src/plugins/plugin-core/scopePolicy.ts | 73 - cli/src/plugins/plugin-core/types.ts | 39 - cli/src/plugins/plugin-cursor.ts | 3 - cli/src/plugins/plugin-droid-cli.ts | 3 - cli/src/plugins/plugin-editorconfig.ts | 3 - cli/src/plugins/plugin-gemini-cli.ts | 3 - cli/src/plugins/plugin-git-exclude.ts | 3 - cli/src/plugins/plugin-jetbrains-ai-codex.ts | 3 - cli/src/plugins/plugin-jetbrains-codestyle.ts | 3 - cli/src/plugins/plugin-openai-codex-cli.ts | 3 - cli/src/plugins/plugin-opencode-cli.ts | 3 - cli/src/plugins/plugin-qoder-ide.ts | 3 - cli/src/plugins/plugin-readme.ts | 3 - cli/src/plugins/plugin-trae-cn-ide.ts | 3 - cli/src/plugins/plugin-trae-ide.ts | 3 - cli/src/plugins/plugin-vscode.ts | 3 - cli/src/plugins/plugin-warp-ide.ts | 3 - cli/src/plugins/plugin-windsurf.ts | 3 - cli/src/plugins/plugin-zed.ts | 3 - cli/src/prompts.test.ts | 367 --- cli/src/prompts.ts | 804 ------ cli/src/public-config-paths.ts | 208 -- cli/src/runtime-environment.test.ts | 149 -- cli/src/runtime-environment.ts | 361 --- cli/src/schema.ts | 14 - cli/src/script-runtime-worker.ts | 19 - cli/src/wsl-mirror-sync.test.ts | 588 ----- cli/src/wsl-mirror-sync.ts | 656 ----- 211 files changed, 16 insertions(+), 36603 deletions(-) delete mode 100644 cli/src/Aindex.ts delete mode 100644 cli/src/ConfigLoader.test.ts delete mode 100644 cli/src/ConfigLoader.ts delete mode 100644 cli/src/PluginPipeline.test.ts delete mode 100644 cli/src/PluginPipeline.ts delete mode 100644 cli/src/ProtectedDeletionGuard.ts delete mode 100644 cli/src/aindex-config/AindexProjectConfig.ts delete mode 100644 cli/src/aindex-config/AindexProjectConfigLoader.ts delete mode 100644 cli/src/aindex-config/index.ts delete mode 100644 cli/src/aindex-project-series.ts delete mode 100644 cli/src/bridge/mod.rs delete mode 100644 cli/src/bridge/node.rs delete mode 100644 cli/src/cleanup/delete-targets.ts delete mode 100644 cli/src/cleanup/empty-directories.ts delete mode 100644 cli/src/cli-runtime.test.ts delete mode 100644 cli/src/cli-runtime.ts delete mode 100644 cli/src/commands/CleanCommand.ts delete mode 100644 cli/src/commands/CleanupUtils.adapter.test.ts delete mode 100644 cli/src/commands/CleanupUtils.test.ts delete mode 100644 cli/src/commands/CleanupUtils.ts delete mode 100644 cli/src/commands/Command.ts delete mode 100644 cli/src/commands/CommandFactory.ts delete mode 100644 cli/src/commands/CommandRegistry.ts delete mode 100644 cli/src/commands/CommandUtils.ts delete mode 100644 cli/src/commands/ConfigCommand.ts delete mode 100644 cli/src/commands/ConfigShowCommand.ts delete mode 100644 cli/src/commands/DryRunCleanCommand.ts delete mode 100644 cli/src/commands/DryRunOutputCommand.ts delete mode 100644 cli/src/commands/ExecuteCommand.ts delete mode 100644 cli/src/commands/HelpCommand.ts delete mode 100644 cli/src/commands/InitCommand.test.ts delete mode 100644 cli/src/commands/InitCommand.ts delete mode 100644 cli/src/commands/JsonOutputCommand.ts delete mode 100644 cli/src/commands/PluginsCommand.ts delete mode 100644 cli/src/commands/ProtectedDeletionCommands.test.ts delete mode 100644 cli/src/commands/SetCommand.ts delete mode 100644 cli/src/commands/UnknownCommand.ts delete mode 100644 cli/src/commands/VersionCommand.ts delete mode 100644 cli/src/commands/bridge.rs delete mode 100644 cli/src/commands/config_cmd.rs delete mode 100644 cli/src/commands/config_show.rs delete mode 100644 cli/src/commands/factories/CleanCommandFactory.ts delete mode 100644 cli/src/commands/factories/ConfigCommandFactory.ts delete mode 100644 cli/src/commands/factories/DryRunCommandFactory.ts delete mode 100644 cli/src/commands/factories/ExecuteCommandFactory.ts delete mode 100644 cli/src/commands/factories/HelpCommandFactory.ts delete mode 100644 cli/src/commands/factories/InitCommandFactory.ts delete mode 100644 cli/src/commands/factories/PluginsCommandFactory.ts delete mode 100644 cli/src/commands/factories/UnknownCommandFactory.ts delete mode 100644 cli/src/commands/factories/VersionCommandFactory.ts delete mode 100644 cli/src/commands/help.rs delete mode 100644 cli/src/commands/mod.rs delete mode 100644 cli/src/commands/version.rs delete mode 100644 cli/src/config.outputScopes.test.ts delete mode 100644 cli/src/config.plugins-fast-path.test.ts delete mode 100644 cli/src/config.test.ts delete mode 100644 cli/src/config.ts delete mode 100644 cli/src/core/cleanup.rs delete mode 100644 cli/src/core/config/mod.rs delete mode 100644 cli/src/core/config/series_filter.rs delete mode 100644 cli/src/core/desk-paths.ts delete mode 100644 cli/src/core/desk_paths.rs delete mode 100644 cli/src/core/input_plugins.rs delete mode 100644 cli/src/core/mod.rs delete mode 100644 cli/src/core/native-binding.ts delete mode 100644 cli/src/core/plugin_shared.rs delete mode 100644 cli/src/diagnostic_helpers.rs delete mode 100644 cli/src/diagnostics.test.ts delete mode 100644 cli/src/diagnostics.ts delete mode 100644 cli/src/inputs/AbstractInputCapability.ts delete mode 100644 cli/src/inputs/effect-md-cleanup.ts delete mode 100644 cli/src/inputs/effect-orphan-cleanup.test.ts delete mode 100644 cli/src/inputs/effect-orphan-cleanup.ts delete mode 100644 cli/src/inputs/effect-skill-sync.test.ts delete mode 100644 cli/src/inputs/effect-skill-sync.ts delete mode 100644 cli/src/inputs/index.ts delete mode 100644 cli/src/inputs/input-agentskills-export-fallback.test.ts delete mode 100644 cli/src/inputs/input-agentskills-types.ts delete mode 100644 cli/src/inputs/input-agentskills.test.ts delete mode 100644 cli/src/inputs/input-agentskills.ts delete mode 100644 cli/src/inputs/input-aindex.test.ts delete mode 100644 cli/src/inputs/input-aindex.ts delete mode 100644 cli/src/inputs/input-command.test.ts delete mode 100644 cli/src/inputs/input-command.ts delete mode 100644 cli/src/inputs/input-editorconfig.ts delete mode 100644 cli/src/inputs/input-git-exclude.ts delete mode 100644 cli/src/inputs/input-gitignore.ts delete mode 100644 cli/src/inputs/input-global-memory.ts delete mode 100644 cli/src/inputs/input-jetbrains-config.ts delete mode 100644 cli/src/inputs/input-project-prompt.test.ts delete mode 100644 cli/src/inputs/input-project-prompt.ts delete mode 100644 cli/src/inputs/input-public-config.test.ts delete mode 100644 cli/src/inputs/input-readme.test.ts delete mode 100644 cli/src/inputs/input-readme.ts delete mode 100644 cli/src/inputs/input-rule.test.ts delete mode 100644 cli/src/inputs/input-rule.ts delete mode 100644 cli/src/inputs/input-shared-ignore.ts delete mode 100644 cli/src/inputs/input-subagent.test.ts delete mode 100644 cli/src/inputs/input-subagent.ts delete mode 100644 cli/src/inputs/input-vscode-config.ts delete mode 100644 cli/src/inputs/input-workspace.ts delete mode 100644 cli/src/inputs/input-zed-config.ts delete mode 100644 cli/src/inputs/runtime.ts delete mode 100644 cli/src/lib.rs delete mode 100644 cli/src/pipeline/CliArgumentParser.test.ts delete mode 100644 cli/src/pipeline/CliArgumentParser.ts delete mode 100644 cli/src/pipeline/ContextMerger.ts delete mode 100644 cli/src/pipeline/DependencyResolver.ts delete mode 100644 cli/src/pipeline/OutputRuntimeTargets.ts delete mode 100644 cli/src/plugin-runtime.ts delete mode 100644 cli/src/plugin.config.ts delete mode 100644 cli/src/plugins/AbstractOutputPlugin.test.ts delete mode 100644 cli/src/plugins/AgentsOutputPlugin.test.ts delete mode 100644 cli/src/plugins/AgentsOutputPlugin.ts delete mode 100644 cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts delete mode 100644 cli/src/plugins/CodexCLIOutputPlugin.test.ts delete mode 100644 cli/src/plugins/CodexCLIOutputPlugin.ts delete mode 100644 cli/src/plugins/CursorOutputPlugin.test.ts delete mode 100644 cli/src/plugins/CursorOutputPlugin.ts delete mode 100644 cli/src/plugins/DroidCLIOutputPlugin.ts delete mode 100644 cli/src/plugins/EditorConfigOutputPlugin.ts delete mode 100644 cli/src/plugins/GeminiCLIOutputPlugin.ts delete mode 100644 cli/src/plugins/GenericSkillsOutputPlugin.test.ts delete mode 100644 cli/src/plugins/GenericSkillsOutputPlugin.ts delete mode 100644 cli/src/plugins/GitExcludeOutputPlugin.ts delete mode 100644 cli/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts delete mode 100644 cli/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts delete mode 100644 cli/src/plugins/OpencodeCLIOutputPlugin.test.ts delete mode 100644 cli/src/plugins/OpencodeCLIOutputPlugin.ts delete mode 100644 cli/src/plugins/PromptMarkdownCleanup.test.ts delete mode 100644 cli/src/plugins/QoderIDEPluginOutputPlugin.test.ts delete mode 100644 cli/src/plugins/QoderIDEPluginOutputPlugin.ts delete mode 100644 cli/src/plugins/ReadmeMdConfigFileOutputPlugin.ts delete mode 100644 cli/src/plugins/TraeCNIDEOutputPlugin.ts delete mode 100644 cli/src/plugins/TraeIDEOutputPlugin.test.ts delete mode 100644 cli/src/plugins/TraeIDEOutputPlugin.ts delete mode 100644 cli/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts delete mode 100644 cli/src/plugins/WarpIDEOutputPlugin.test.ts delete mode 100644 cli/src/plugins/WarpIDEOutputPlugin.ts delete mode 100644 cli/src/plugins/WindsurfOutputPlugin.test.ts delete mode 100644 cli/src/plugins/WindsurfOutputPlugin.ts delete mode 100644 cli/src/plugins/WslMirrorDeclarations.test.ts delete mode 100644 cli/src/plugins/ZedIDEConfigOutputPlugin.ts delete mode 100644 cli/src/plugins/desk-paths.test.ts delete mode 100644 cli/src/plugins/desk-paths.ts delete mode 100644 cli/src/plugins/ide-config-output.test.ts delete mode 100644 cli/src/plugins/plugin-agentskills-compact.ts delete mode 100644 cli/src/plugins/plugin-agentsmd.ts delete mode 100644 cli/src/plugins/plugin-claude-code-cli.ts delete mode 100644 cli/src/plugins/plugin-core.ts delete mode 100644 cli/src/plugins/plugin-core/AbstractOutputPlugin.frontmatter.test.ts delete mode 100644 cli/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts delete mode 100644 cli/src/plugins/plugin-core/AbstractOutputPlugin.ts delete mode 100644 cli/src/plugins/plugin-core/AbstractPlugin.ts delete mode 100644 cli/src/plugins/plugin-core/AindexConfigDefaults.ts delete mode 100644 cli/src/plugins/plugin-core/AindexTypes.ts delete mode 100644 cli/src/plugins/plugin-core/ConfigTypes.schema.ts delete mode 100644 cli/src/plugins/plugin-core/DistPromptGuards.test.ts delete mode 100644 cli/src/plugins/plugin-core/DistPromptGuards.ts delete mode 100644 cli/src/plugins/plugin-core/ExportMetadataTypes.ts delete mode 100644 cli/src/plugins/plugin-core/GlobalScopeCollector.ts delete mode 100644 cli/src/plugins/plugin-core/InputTypes.ts delete mode 100644 cli/src/plugins/plugin-core/LocalizedPromptReader.ts delete mode 100644 cli/src/plugins/plugin-core/McpConfigManager.ts delete mode 100644 cli/src/plugins/plugin-core/OutputTypes.ts delete mode 100644 cli/src/plugins/plugin-core/PromptArtifactCache.test.ts delete mode 100644 cli/src/plugins/plugin-core/PromptArtifactCache.ts delete mode 100644 cli/src/plugins/plugin-core/PromptCompilerDiagnostics.test.ts delete mode 100644 cli/src/plugins/plugin-core/PromptCompilerDiagnostics.ts delete mode 100644 cli/src/plugins/plugin-core/PromptIdentity.ts delete mode 100644 cli/src/plugins/plugin-core/PromptTypes.ts delete mode 100644 cli/src/plugins/plugin-core/RegistryWriter.ts delete mode 100644 cli/src/plugins/plugin-core/constants.ts delete mode 100644 cli/src/plugins/plugin-core/enums.ts delete mode 100644 cli/src/plugins/plugin-core/filters.ts delete mode 100644 cli/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts delete mode 100644 cli/src/plugins/plugin-core/plugin.ts delete mode 100644 cli/src/plugins/plugin-core/scopePolicy.test.ts delete mode 100644 cli/src/plugins/plugin-core/scopePolicy.ts delete mode 100644 cli/src/plugins/plugin-core/types.ts delete mode 100644 cli/src/plugins/plugin-cursor.ts delete mode 100644 cli/src/plugins/plugin-droid-cli.ts delete mode 100644 cli/src/plugins/plugin-editorconfig.ts delete mode 100644 cli/src/plugins/plugin-gemini-cli.ts delete mode 100644 cli/src/plugins/plugin-git-exclude.ts delete mode 100644 cli/src/plugins/plugin-jetbrains-ai-codex.ts delete mode 100644 cli/src/plugins/plugin-jetbrains-codestyle.ts delete mode 100644 cli/src/plugins/plugin-openai-codex-cli.ts delete mode 100644 cli/src/plugins/plugin-opencode-cli.ts delete mode 100644 cli/src/plugins/plugin-qoder-ide.ts delete mode 100644 cli/src/plugins/plugin-readme.ts delete mode 100644 cli/src/plugins/plugin-trae-cn-ide.ts delete mode 100644 cli/src/plugins/plugin-trae-ide.ts delete mode 100644 cli/src/plugins/plugin-vscode.ts delete mode 100644 cli/src/plugins/plugin-warp-ide.ts delete mode 100644 cli/src/plugins/plugin-windsurf.ts delete mode 100644 cli/src/plugins/plugin-zed.ts delete mode 100644 cli/src/prompts.test.ts delete mode 100644 cli/src/prompts.ts delete mode 100644 cli/src/public-config-paths.ts delete mode 100644 cli/src/runtime-environment.test.ts delete mode 100644 cli/src/runtime-environment.ts delete mode 100644 cli/src/schema.ts delete mode 100644 cli/src/script-runtime-worker.ts delete mode 100644 cli/src/wsl-mirror-sync.test.ts delete mode 100644 cli/src/wsl-mirror-sync.ts diff --git a/cli/src/Aindex.ts b/cli/src/Aindex.ts deleted file mode 100644 index 51c66948..00000000 --- a/cli/src/Aindex.ts +++ /dev/null @@ -1,161 +0,0 @@ -/** - * Aindex validation and generation utilities - * 使用扁平的 bundles 结构直接遍历创建项目目录和文件 - */ -import type {AindexConfig, ILogger} from './plugins/plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' - -/** - * Version control check result - */ -export interface VersionControlCheckResult { - readonly hasGit: boolean - readonly gitPath: string -} - -/** - * Check if the aindex has version control (.git directory) - * Logs info if .git exists, warns if not - * - * @param rootPath - Root path of the aindex - * @param logger - Optional logger instance - * @returns Version control check result - */ -export function checkVersionControl( - rootPath: string, - logger?: ILogger -): VersionControlCheckResult { - const gitPath = path.join(rootPath, '.git') - const hasGit = fs.existsSync(gitPath) - - if (hasGit) logger?.info('version control detected', {path: gitPath}) - else { - logger?.warn(buildUsageDiagnostic({ - code: 'AINDEX_VERSION_CONTROL_MISSING', - title: 'Aindex root is not under version control', - rootCause: diagnosticLines(`tnmsc did not find a .git directory under "${rootPath}".`), - exactFix: diagnosticLines( - `Initialize git in "${rootPath}" or place the aindex inside an existing git repository.` - ), - possibleFixes: [ - diagnosticLines('Run `git init` in the aindex root if the directory should be versioned.') - ], - details: { - rootPath, - gitPath - } - })) - } - - return {hasGit, gitPath} -} - -/** - * Generation result - */ -export interface GenerationResult { - readonly success: boolean - readonly rootPath: string - readonly createdDirs: readonly string[] - readonly createdFiles: readonly string[] - readonly existedDirs: readonly string[] - readonly existedFiles: readonly string[] -} - -/** - * Generation options - */ -export interface GenerationOptions { - /** Logger instance */ - readonly logger?: ILogger - /** Aindex structure from user config */ - readonly config?: Required -} - -const DEFAULT_FILE_CONTENT = '# Generated by tnmsc init\n' - -function isFilePath(relativePath: string): boolean { - return path.extname(relativePath).length > 0 -} - -/** - * Generate aindex directory structure - */ -export function generateAindex( - rootPath: string, - options: GenerationOptions = {} -): GenerationResult { - const {logger, config} = options - const createdDirs: string[] = [] - const createdFiles: string[] = [] - const existedDirs: string[] = [] - const existedFiles: string[] = [] - const createdDirsSet = new Set() - const existedDirsSet = new Set() - const existedFilesSet = new Set() - - const ensureDirectory = (dirPath: string): void => { - if (fs.existsSync(dirPath)) { - if (!existedDirsSet.has(dirPath)) { - existedDirsSet.add(dirPath) - existedDirs.push(dirPath) - logger?.debug('directory exists', {path: dirPath}) - } - return - } - - fs.mkdirSync(dirPath, {recursive: true}) - - let currentDir = dirPath - while (!createdDirsSet.has(currentDir)) { - createdDirsSet.add(currentDir) - createdDirs.push(currentDir) - logger?.info('created directory', {path: currentDir}) - - if (currentDir === rootPath) break - currentDir = path.dirname(currentDir) - } - } - - const ensureFile = (filePath: string, content: string = DEFAULT_FILE_CONTENT): void => { - ensureDirectory(path.dirname(filePath)) - - if (fs.existsSync(filePath)) { - if (!existedFilesSet.has(filePath)) { - existedFilesSet.add(filePath) - existedFiles.push(filePath) - logger?.debug('file exists', {path: filePath}) - } - return - } - - fs.writeFileSync(filePath, content, 'utf8') - createdFiles.push(filePath) - logger?.info('created file', {path: filePath}) - } - - ensureDirectory(rootPath) - - if (config != null) { - for (const [key, moduleConfig] of Object.entries(config)) { - if (key === 'dir' || typeof moduleConfig !== 'object' || moduleConfig == null) continue - - for (const relativePath of [moduleConfig.src, moduleConfig.dist]) { - const targetPath = path.join(rootPath, relativePath) - if (isFilePath(relativePath)) ensureFile(targetPath) - else ensureDirectory(targetPath) - } - } - } - - return { - success: true, - rootPath, - createdDirs, - createdFiles, - existedDirs, - existedFiles - } -} diff --git a/cli/src/ConfigLoader.test.ts b/cli/src/ConfigLoader.test.ts deleted file mode 100644 index 7a72fc12..00000000 --- a/cli/src/ConfigLoader.test.ts +++ /dev/null @@ -1,67 +0,0 @@ -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import {afterEach, describe, expect, it} from 'vitest' -import {ConfigLoader, getGlobalConfigPath} from './ConfigLoader' - -describe('configLoader', () => { - const originalHome = process.env.HOME - const originalUserProfile = process.env.USERPROFILE - const originalHomeDrive = process.env.HOMEDRIVE - const originalHomePath = process.env.HOMEPATH - - afterEach(() => { - process.env.HOME = originalHome - process.env.USERPROFILE = originalUserProfile - process.env.HOMEDRIVE = originalHomeDrive - process.env.HOMEPATH = originalHomePath - }) - - it('searches only the canonical global config path', () => { - const tempHome = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-home-')) - process.env.HOME = tempHome - process.env.USERPROFILE = tempHome - delete process.env.HOMEDRIVE - delete process.env.HOMEPATH - - try { - const loader = new ConfigLoader() - expect(loader.getSearchPaths(path.join(tempHome, 'workspace'))).toEqual([getGlobalConfigPath()]) - } - finally { - fs.rmSync(tempHome, {recursive: true, force: true}) - } - }) - - it('defaults aindex.softwares when loading an older config file', () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-config-loader-')) - const configPath = path.join(tempDir, '.tnmsc.json') - - try { - fs.writeFileSync(configPath, JSON.stringify({ - workspaceDir: '/tmp/workspace', - aindex: { - dir: 'aindex', - skills: {src: 'skills', dist: 'dist/skills'}, - commands: {src: 'commands', dist: 'dist/commands'}, - subAgents: {src: 'subagents', dist: 'dist/subagents'}, - rules: {src: 'rules', dist: 'dist/rules'}, - globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, - workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, - app: {src: 'app', dist: 'dist/app'}, - ext: {src: 'ext', dist: 'dist/ext'}, - arch: {src: 'arch', dist: 'dist/arch'} - } - }), 'utf8') - - const loader = new ConfigLoader() - const result = loader.loadFromFile(configPath) - - expect(result.found).toBe(true) - expect(result.config.aindex?.softwares).toEqual({src: 'softwares', dist: 'dist/softwares'}) - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/ConfigLoader.ts b/cli/src/ConfigLoader.ts deleted file mode 100644 index 90e15cff..00000000 --- a/cli/src/ConfigLoader.ts +++ /dev/null @@ -1,473 +0,0 @@ -import type {ILogger} from '@truenine/logger' -import type { - AindexConfig, - CleanupProtectionOptions, - ConfigLoaderOptions, - ConfigLoadResult, - FrontMatterOptions, - OutputScopeOptions, - PluginOutputScopeTopics, - UserConfigFile, - WindowsOptions -} from './plugins/plugin-core' -import * as fs from 'node:fs' -import process from 'node:process' -import {createLogger} from '@truenine/logger' -import { - buildConfigDiagnostic, - buildFileOperationDiagnostic, - diagnosticLines, - splitDiagnosticText -} from './diagnostics' -import {mergeAindexConfig, ZUserConfigFile} from './plugins/plugin-core' -import { - getRequiredGlobalConfigPath, - resolveRuntimeEnvironment, - resolveUserPath, - DEFAULT_GLOBAL_CONFIG_FILE_NAME as RUNTIME_DEFAULT_CONFIG_FILE_NAME, - DEFAULT_GLOBAL_CONFIG_DIR as RUNTIME_DEFAULT_GLOBAL_CONFIG_DIR -} from './runtime-environment' - -/** - * Default config file name - */ -export const DEFAULT_CONFIG_FILE_NAME = '.tnmsc.json' - -/** - * Default global config directory (relative to home) - */ -export const DEFAULT_GLOBAL_CONFIG_DIR = '.aindex' - -/** - * Get global config file path - */ -export function getGlobalConfigPath(): string { - return getRequiredGlobalConfigPath() -} - -/** - * Validation result for global config - */ -export interface GlobalConfigValidationResult { - readonly valid: boolean - - readonly exists: boolean - - readonly errors: readonly string[] - - readonly shouldExit: boolean -} - -/** - * ConfigLoader handles discovery and loading of user configuration files. - * - * The config source is fixed and unambiguous: - * 1. Global: ~/.aindex/.tnmsc.json - */ -export class ConfigLoader { - private readonly logger: ILogger - - constructor(options: ConfigLoaderOptions = {}) { - void options - this.logger = createLogger('ConfigLoader') - } - - getSearchPaths(cwd: string = process.cwd()): string[] { - void cwd - const runtimeEnvironment = resolveRuntimeEnvironment() - - if (!runtimeEnvironment.isWsl) return [getRequiredGlobalConfigPath()] - - this.logger.info('wsl environment detected', { - effectiveHomeDir: runtimeEnvironment.effectiveHomeDir - }) - if (runtimeEnvironment.selectedGlobalConfigPath == null) { - throw new Error( - `WSL host config file not found under "${runtimeEnvironment.windowsUsersRoot}/*/${DEFAULT_GLOBAL_CONFIG_DIR}/${DEFAULT_CONFIG_FILE_NAME}".` - ) - } - this.logger.info('using wsl host global config', { - path: runtimeEnvironment.selectedGlobalConfigPath - }) - return [getRequiredGlobalConfigPath()] - } - - loadFromFile(filePath: string): ConfigLoadResult { - const resolvedPath = this.resolveTilde(filePath) - - try { - if (!fs.existsSync(resolvedPath)) return {config: {}, source: null, found: false} - - const content = fs.readFileSync(resolvedPath, 'utf8') - const config = this.parseConfig(content, resolvedPath) - - this.logger.debug('loaded', {source: resolvedPath}) - return {config, source: resolvedPath, found: true} - } - catch (error) { - this.logger.warn(buildFileOperationDiagnostic({ - code: 'CONFIG_FILE_LOAD_FAILED', - title: 'Failed to load config file', - operation: 'read', - targetKind: 'config file', - path: resolvedPath, - error - })) - return {config: {}, source: null, found: false} - } - } - - load(cwd: string = process.cwd()): MergedConfigResult { - const searchPaths = this.getSearchPaths(cwd) - const loadedConfigs: ConfigLoadResult[] = [] - - for (const searchPath of searchPaths) { - const result = this.loadFromFile(searchPath) - if (result.found) loadedConfigs.push(result) - } - - const merged = this.mergeConfigs(loadedConfigs.map(r => r.config)) // Merge configs (first has highest priority) - const sources = loadedConfigs.map(r => r.source).filter((s): s is string => s !== null) - - return { - config: merged, - sources, - found: loadedConfigs.length > 0 - } - } - - private parseConfig(content: string, filePath: string): UserConfigFile { - let parsed: unknown - try { - parsed = JSON.parse(content) - } - catch (error) { - if (error instanceof SyntaxError) throw new Error(`Invalid JSON in ${filePath}: ${error.message}`) - throw error - } - - const result = ZUserConfigFile.safeParse(parsed) - if (result.success) return result.data - - const errors = result.error.issues.map((i: {path: (string | number)[], message: string}) => `${i.path.join('.')}: ${i.message}`) // Validation failed - throw error instead of returning empty config - throw new Error(`Config validation failed in ${filePath}:\n${errors.join('\n')}`) - } - - private mergeConfigs(configs: UserConfigFile[]): UserConfigFile { - if (configs.length === 0) return {} - - const firstConfig = configs[0] - if (configs.length === 1 && firstConfig != null) return firstConfig - - const reversed = [...configs].reverse() // Reverse to merge from lowest to highest priority - - return reversed.reduce((acc, config) => { - const mergedAindex = this.mergeAindex(acc.aindex, config.aindex) - const mergedOutputScopes = this.mergeOutputScopeOptions(acc.outputScopes, config.outputScopes) - const mergedFrontMatter = this.mergeFrontMatterOptions(acc.frontMatter, config.frontMatter) - const mergedCleanupProtection = this.mergeCleanupProtectionOptions( - acc.cleanupProtection, - config.cleanupProtection - ) - const mergedWindows = this.mergeWindowsOptions(acc.windows, config.windows) - - return { - ...acc, - ...config, - ...mergedAindex != null ? {aindex: mergedAindex} : {}, - ...mergedOutputScopes != null ? {outputScopes: mergedOutputScopes} : {}, - ...mergedFrontMatter != null ? {frontMatter: mergedFrontMatter} : {}, - ...mergedCleanupProtection != null ? {cleanupProtection: mergedCleanupProtection} : {}, - ...mergedWindows != null ? {windows: mergedWindows} : {} - } - }, {}) - } - - private mergeAindex( - a?: AindexConfig, - b?: AindexConfig - ): AindexConfig | undefined { - if (a == null && b == null) return void 0 - if (a == null) return b - if (b == null) return a - return mergeAindexConfig(a, b) - } - - private mergeOutputScopeTopics( - a?: PluginOutputScopeTopics, - b?: PluginOutputScopeTopics - ): PluginOutputScopeTopics | undefined { - if (a == null && b == null) return void 0 - if (a == null) return b - if (b == null) return a - return {...a, ...b} - } - - private mergeOutputScopeOptions( - a?: OutputScopeOptions, - b?: OutputScopeOptions - ): OutputScopeOptions | undefined { - if (a == null && b == null) return void 0 - if (a == null) return b - if (b == null) return a - - const mergedPlugins: Record = {} - for (const [pluginName, topics] of Object.entries(a.plugins ?? {})) { - if (topics != null) mergedPlugins[pluginName] = {...topics} - } - for (const [pluginName, topics] of Object.entries(b.plugins ?? {})) { - const mergedTopics = this.mergeOutputScopeTopics(mergedPlugins[pluginName], topics) - if (mergedTopics != null) mergedPlugins[pluginName] = mergedTopics - } - - if (Object.keys(mergedPlugins).length === 0) return {} - return {plugins: mergedPlugins} - } - - private mergeFrontMatterOptions( - a?: FrontMatterOptions, - b?: FrontMatterOptions - ): FrontMatterOptions | undefined { - if (a == null && b == null) return void 0 - if (a == null) return b - if (b == null) return a - return {...a, ...b} - } - - private mergeCleanupProtectionOptions( - a?: CleanupProtectionOptions, - b?: CleanupProtectionOptions - ): CleanupProtectionOptions | undefined { - if (a == null && b == null) return void 0 - if (a == null) return b - if (b == null) return a - - return { - rules: [ - ...a.rules ?? [], - ...b.rules ?? [] - ] - } - } - - private mergeWindowsOptions( - a?: WindowsOptions, - b?: WindowsOptions - ): WindowsOptions | undefined { - if (a == null && b == null) return void 0 - if (a == null) return b - if (b == null) return a - - return { - ...a, - ...b, - ...a.wsl2 != null || b.wsl2 != null - ? { - wsl2: { - ...a.wsl2, - ...b.wsl2 - } - } - : {} - } - } - - private resolveTilde(p: string): string { - return p.startsWith('~') ? resolveUserPath(p) : p - } -} - -/** - * Result of loading and merging all configurations - */ -export interface MergedConfigResult { - readonly config: UserConfigFile - - readonly sources: readonly string[] - - readonly found: boolean -} - -/** - * Singleton instance for convenience - */ -let defaultLoader: ConfigLoader | null = null - -/** - * Get or create the default ConfigLoader instance - */ -export function getConfigLoader(options?: ConfigLoaderOptions): ConfigLoader { - if (options || !defaultLoader) defaultLoader = new ConfigLoader(options) - return defaultLoader -} - -/** - * Load user configuration using default loader - */ -export function loadUserConfig(cwd?: string): MergedConfigResult { - return getConfigLoader().load(cwd) -} - -/** - * Validate global config file strictly. - * - If config doesn't exist: return invalid result (do not auto-create) - * - If config is invalid (parse error or validation error): return invalid result (do not recreate) - * - * @returns Validation result indicating whether program should continue or exit - */ -export function validateGlobalConfig(): GlobalConfigValidationResult { - const logger = createLogger('ConfigLoader') - let configPath: string - - try { - configPath = getRequiredGlobalConfigPath() - } - catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error) - logger.error(buildConfigDiagnostic({ - code: 'GLOBAL_CONFIG_PATH_RESOLUTION_FAILED', - title: 'Failed to resolve global config path', - reason: diagnosticLines(errorMessage), - configPath: `${RUNTIME_DEFAULT_GLOBAL_CONFIG_DIR}/${RUNTIME_DEFAULT_CONFIG_FILE_NAME}`, - exactFix: diagnosticLines( - 'Ensure the required global config exists in the expected runtime-specific location before running tnmsc again.' - ) - })) - return { - valid: false, - exists: false, - errors: [errorMessage], - shouldExit: true - } - } - - if (!fs.existsSync(configPath)) { // Check if config file exists - do not auto-create - const error = `Global config not found at ${configPath}. Please create it manually.` - logger.error(buildConfigDiagnostic({ - code: 'GLOBAL_CONFIG_MISSING', - title: 'Global config file is missing', - reason: diagnosticLines( - `tnmsc could not find the required global config file at "${configPath}".` - ), - configPath, - exactFix: diagnosticLines( - 'Create the global config file manually before running tnmsc again.' - ), - possibleFixes: [ - diagnosticLines('Initialize the file with a valid JSON object, for example `{}`.') - ] - })) - return { - valid: false, - exists: false, - errors: [error], - shouldExit: true - } - } - - let content: string - try { - content = fs.readFileSync(configPath, 'utf8') - } - catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error) - logger.error(buildFileOperationDiagnostic({ - code: 'GLOBAL_CONFIG_READ_FAILED', - title: 'Failed to read global config file', - operation: 'read', - targetKind: 'global config file', - path: configPath, - error: errorMessage - })) - return { - valid: false, - exists: true, - errors: [`Failed to read config: ${errorMessage}`], - shouldExit: true - } - } - - let parsed: unknown - try { - parsed = JSON.parse(content) - } - catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error) - logger.error(buildConfigDiagnostic({ - code: 'GLOBAL_CONFIG_JSON_INVALID', - title: 'Global config contains invalid JSON', - reason: diagnosticLines( - `tnmsc could not parse the JSON in "${configPath}".`, - `Parser error: ${errorMessage}` - ), - configPath, - exactFix: diagnosticLines( - 'Fix the JSON syntax in the global config file so it parses as a single JSON object.' - ), - possibleFixes: [ - diagnosticLines('Validate the file with a JSON parser and remove trailing commas or invalid tokens.') - ] - })) - return { - valid: false, - exists: true, - errors: [`Invalid JSON: ${errorMessage}`], - shouldExit: true - } - } - - if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) { - logger.error(buildConfigDiagnostic({ - code: 'GLOBAL_CONFIG_NOT_OBJECT', - title: 'Global config must be a JSON object', - reason: diagnosticLines( - `tnmsc parsed "${configPath}" successfully, but the top-level value is not a JSON object.` - ), - configPath, - exactFix: diagnosticLines( - 'Replace the top-level JSON value with an object like `{}` or a valid config object.' - ) - })) - return { - valid: false, - exists: true, - errors: ['Config must be a JSON object'], - shouldExit: true - } - } - - const zodResult = ZUserConfigFile.safeParse(parsed) - if (!zodResult.success) { - const errors = zodResult.error.issues.map((i: {path: (string | number)[], message: string}) => `${i.path.join('.')}: ${i.message}`) - for (const err of errors) { - logger.error(buildConfigDiagnostic({ - code: 'GLOBAL_CONFIG_VALIDATION_FAILED', - title: 'Global config validation failed', - reason: splitDiagnosticText(err), - configPath, - exactFix: diagnosticLines( - 'Update the invalid config field so it matches the tnmsc schema.' - ), - possibleFixes: [ - diagnosticLines('Compare the field name and value against the current config schema or examples.') - ], - details: { - validationError: err - } - })) - } - return { - valid: false, - exists: true, - errors, - shouldExit: true - } - } - - return { - valid: true, - exists: true, - errors: [], - shouldExit: false - } -} diff --git a/cli/src/PluginPipeline.test.ts b/cli/src/PluginPipeline.test.ts deleted file mode 100644 index 27d12a4f..00000000 --- a/cli/src/PluginPipeline.test.ts +++ /dev/null @@ -1,60 +0,0 @@ -import type {PipelineConfig} from './config' -import type {OutputPlugin} from './plugins/plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from './config' -import {PluginPipeline} from './PluginPipeline' -import {createLogger, FilePathKind, PluginKind} from './plugins/plugin-core' - -describe('plugin pipeline output contexts', () => { - it('passes user config options into write contexts', async () => { - const tempDir = path.resolve('tmp/plugin-pipeline-frontmatter') - fs.rmSync(tempDir, {recursive: true, force: true}) - fs.mkdirSync(tempDir, {recursive: true}) - - const outputPath = path.join(tempDir, 'frontmatter.txt') - let seenBlankLineAfter: boolean | undefined - - const plugin: OutputPlugin = { - type: PluginKind.Output, - name: 'CaptureOutputPlugin', - log: createLogger('CaptureOutputPlugin', 'error'), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles(ctx) { - seenBlankLineAfter = ctx.pluginOptions?.frontMatter?.blankLineAfter - return [{path: outputPath, source: 'capture'}] - }, - async convertContent(_declaration, ctx) { - return String(ctx.pluginOptions?.frontMatter?.blankLineAfter) - } - } - - const config: PipelineConfig = { - context: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: tempDir, - getDirectoryName: () => path.basename(tempDir) - }, - projects: [] - } - }, - outputPlugins: [plugin], - userConfigOptions: mergeConfig({ - workspaceDir: tempDir, - frontMatter: { - blankLineAfter: false - } - }) - } - - const result = await new PluginPipeline('node', 'tnmsc').run(config) - - expect(result.success).toBe(true) - expect(seenBlankLineAfter).toBe(false) - expect(fs.readFileSync(outputPath, 'utf8')).toBe('false') - }) -}) diff --git a/cli/src/PluginPipeline.ts b/cli/src/PluginPipeline.ts deleted file mode 100644 index 652952ba..00000000 --- a/cli/src/PluginPipeline.ts +++ /dev/null @@ -1,101 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCollectedContext, OutputPlugin, OutputRuntimeTargets, OutputWriteContext, PluginOptions} from './plugins/plugin-core' -import type {Command, CommandContext, CommandResult} from '@/commands/Command' -import type {PipelineConfig} from '@/config' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {JsonOutputCommand} from '@/commands/JsonOutputCommand' -import {extractUserArgs, parseArgs, resolveCommand} from '@/pipeline/CliArgumentParser' -import {discoverOutputRuntimeTargets} from '@/pipeline/OutputRuntimeTargets' -import {createLogger, setGlobalLogLevel} from './plugins/plugin-core' - -/** - * Plugin Pipeline - Orchestrates plugin execution - * - * This class has been refactored to use modular components: - * - CliArgumentParser: CLI argument parsing (moved to @/pipeline) - * - DependencyResolver: dependency ordering (moved to @/pipeline) - * - ContextMerger: Context merging (moved to @/pipeline) - */ -export class PluginPipeline { - private readonly logger: ILogger - readonly args: ParsedCliArgs - private outputPlugins: OutputPlugin[] = [] - private runtimeTargets?: OutputRuntimeTargets - - constructor(...cmdArgs: (string | undefined)[]) { - const filtered = cmdArgs.filter((arg): arg is string => arg != null) - const userArgs = extractUserArgs(filtered) - this.args = parseArgs(userArgs) - - const resolvedLogLevel = this.args.logLevel // Resolve log level from parsed args and set globally - if (resolvedLogLevel != null) setGlobalLogLevel(resolvedLogLevel) - this.logger = createLogger('PluginPipeline', resolvedLogLevel) - this.logger.debug('initialized', {args: this.args}) - } - - registerOutputPlugins(plugins: OutputPlugin[]): this { - this.outputPlugins.push(...plugins) - return this - } - - async run(config: PipelineConfig): Promise { - const {context, outputPlugins, userConfigOptions} = config - this.registerOutputPlugins([...outputPlugins]) - - let command: Command = resolveCommand(this.args) - - if (this.args.jsonFlag) { - setGlobalLogLevel('silent') // Suppress all console logging in JSON mode - - const selfJsonCommands = new Set(['config-show', 'plugins']) // only need log suppression, not JsonOutputCommand wrapping // Commands that handle their own JSON output (config --show, plugins) - if (!selfJsonCommands.has(command.name)) command = new JsonOutputCommand(command) - } - - const commandCtx = this.createCommandContext(context, userConfigOptions) - return command.execute(commandCtx) - } - - private createCommandContext(ctx: OutputCollectedContext, userConfigOptions: Required): CommandContext { - return { - logger: this.logger, - outputPlugins: this.outputPlugins, - collectedOutputContext: ctx, - userConfigOptions, - createCleanContext: (dryRun: boolean) => this.createCleanContext(ctx, userConfigOptions, dryRun), - createWriteContext: (dryRun: boolean) => this.createWriteContext(ctx, userConfigOptions, dryRun) - } - } - - private createCleanContext( - ctx: OutputCollectedContext, - userConfigOptions: Required, - dryRun: boolean - ): OutputCleanContext { - return { - logger: this.logger, - collectedOutputContext: ctx, - pluginOptions: userConfigOptions, - runtimeTargets: this.getRuntimeTargets(), - dryRun - } - } - - private createWriteContext( - ctx: OutputCollectedContext, - userConfigOptions: Required, - dryRun: boolean - ): OutputWriteContext { - return { - logger: this.logger, - collectedOutputContext: ctx, - pluginOptions: userConfigOptions, - runtimeTargets: this.getRuntimeTargets(), - dryRun, - registeredPluginNames: this.outputPlugins.map(p => p.name) - } - } - - private getRuntimeTargets(): OutputRuntimeTargets { - this.runtimeTargets ??= discoverOutputRuntimeTargets(this.logger) - return this.runtimeTargets - } -} diff --git a/cli/src/ProtectedDeletionGuard.ts b/cli/src/ProtectedDeletionGuard.ts deleted file mode 100644 index f0644679..00000000 --- a/cli/src/ProtectedDeletionGuard.ts +++ /dev/null @@ -1,612 +0,0 @@ -import type {ILogger} from '@truenine/logger' -import type {OutputCollectedContext, PluginOptions} from './plugins/plugin-core' -import type {PublicDefinitionResolveOptions} from './public-config-paths' -import * as fs from 'node:fs' -import * as path from 'node:path' -import process from 'node:process' -import glob from 'fast-glob' -import {buildProtectedDeletionDiagnostic} from './diagnostics' -import { - AINDEX_CONFIG_DIRECTORY_PAIR_KEYS, - AINDEX_PROJECT_SERIES_NAMES -} from './plugins/plugin-core' -import {collectKnownPublicConfigDefinitionPaths} from './public-config-paths' -import {getEffectiveHomeDir, resolveUserPath} from './runtime-environment' - -interface DirPathLike { - readonly path: string - readonly pathKind?: string - readonly basePath?: string - readonly getAbsolutePath?: () => string -} - -export type ProtectionMode = 'direct' | 'recursive' -export type ProtectionRuleMatcher = 'path' | 'glob' - -export interface ProtectedPathRule { - readonly path: string - readonly protectionMode: ProtectionMode - readonly reason: string - readonly source: string - readonly matcher?: ProtectionRuleMatcher -} - -interface CompiledProtectedPathRule extends ProtectedPathRule { - readonly comparisonKeys: readonly string[] - readonly normalizedPath: string - readonly specificity: number -} - -export interface ProtectedPathViolation { - readonly targetPath: string - readonly protectedPath: string - readonly protectionMode: ProtectionMode - readonly reason: string - readonly source: string -} - -export interface ProtectedDeletionGuard { - readonly rules: readonly ProtectedPathRule[] - readonly exactProtectedPaths: readonly string[] - readonly subtreeProtectedPaths: readonly string[] - readonly compiledRules: readonly CompiledProtectedPathRule[] -} - -export interface ProtectedDeletionGuardOptions { - readonly workspaceDir?: string - readonly aindexDir?: string - readonly projectRoots?: readonly string[] - readonly exactProtectedPaths?: readonly string[] - readonly subtreeProtectedPaths?: readonly string[] - readonly rules?: readonly ProtectedPathRule[] - readonly includeReservedWorkspaceContentRoots?: boolean -} - -export class ProtectedDeletionGuardError extends Error { - readonly operation: string - - readonly violations: readonly ProtectedPathViolation[] - - constructor(operation: string, violations: readonly ProtectedPathViolation[]) { - super(buildProtectedDeletionGuardMessage(operation, violations)) - this.name = 'ProtectedDeletionGuardError' - this.operation = operation - this.violations = violations - } -} - -const CONFIGURED_AINDEX_FILE_KEYS = [ - 'globalPrompt', - 'workspacePrompt' -] as const satisfies readonly (keyof Required['aindex'])[] - -function resolveXdgConfigHome(homeDir: string): string { - const xdgConfigHome = process.env['XDG_CONFIG_HOME'] - if (typeof xdgConfigHome === 'string' && xdgConfigHome.trim().length > 0) return xdgConfigHome - return path.join(homeDir, '.config') -} - -function resolveXdgDataHome(homeDir: string): string { - const xdgDataHome = process.env['XDG_DATA_HOME'] - if (typeof xdgDataHome === 'string' && xdgDataHome.trim().length > 0) return xdgDataHome - return path.join(homeDir, '.local', 'share') -} - -function resolveXdgStateHome(homeDir: string): string { - const xdgStateHome = process.env['XDG_STATE_HOME'] - if (typeof xdgStateHome === 'string' && xdgStateHome.trim().length > 0) return xdgStateHome - return path.join(homeDir, '.local', 'state') -} - -function resolveXdgCacheHome(homeDir: string): string { - const xdgCacheHome = process.env['XDG_CACHE_HOME'] - if (typeof xdgCacheHome === 'string' && xdgCacheHome.trim().length > 0) return xdgCacheHome - return path.join(homeDir, '.cache') -} - -function resolveAbsolutePathFromDir(dir: DirPathLike | undefined): string | undefined { - if (dir == null) return void 0 - - if (typeof dir.getAbsolutePath === 'function') { - try { - const absolute = dir.getAbsolutePath() - if (absolute.length > 0) return path.resolve(absolute) - } - catch {} - } - - if (dir.pathKind === 'absolute') return path.resolve(dir.path) - if (typeof dir.basePath === 'string' && dir.basePath.length > 0) return path.resolve(dir.basePath, dir.path) - return void 0 -} - -export function expandHomePath(rawPath: string): string { - if (rawPath === '~' || rawPath.startsWith('~/') || rawPath.startsWith('~\\')) return resolveUserPath(rawPath) - return rawPath -} - -export function resolveAbsolutePath(rawPath: string): string { - return path.resolve(expandHomePath(rawPath)) -} - -function normalizeForComparison(rawPath: string): string { - const normalized = path.normalize(resolveAbsolutePath(rawPath)) - if (process.platform === 'win32') return normalized.toLowerCase() - return normalized -} - -function stripTrailingSeparator(rawPath: string): string { - const {root} = path.parse(rawPath) - if (rawPath === root) return rawPath - return rawPath.endsWith(path.sep) ? rawPath.slice(0, -1) : rawPath -} - -function isSameOrChildPath(candidate: string, parent: string): boolean { - const normalizedCandidate = stripTrailingSeparator(candidate) - const normalizedParent = stripTrailingSeparator(parent) - if (normalizedCandidate === normalizedParent) return true - return normalizedCandidate.startsWith(`${normalizedParent}${path.sep}`) -} - -export function buildComparisonKeys(rawPath: string): readonly string[] { - const absolute = resolveAbsolutePath(rawPath) - const keys = new Set([normalizeForComparison(absolute)]) - - try { - if (fs.existsSync(absolute)) { - const realPath = fs.realpathSync.native(absolute) - keys.add(normalizeForComparison(realPath)) - } - } - catch {} - - return [...keys] -} - -function createProtectedPathRule( - rawPath: string, - protectionMode: ProtectionMode, - reason: string, - source: string, - matcher: ProtectionRuleMatcher = 'path' -): ProtectedPathRule { - return { - path: resolveAbsolutePath(rawPath), - protectionMode, - reason, - source, - matcher - } -} - -function compileRule(rule: ProtectedPathRule): CompiledProtectedPathRule { - const normalizedPath = normalizeForComparison(rule.path) - return { - ...rule, - path: resolveAbsolutePath(rule.path), - comparisonKeys: buildComparisonKeys(rule.path), - normalizedPath, - specificity: stripTrailingSeparator(normalizedPath).length - } -} - -function dedupeAndCompileRules(rules: readonly ProtectedPathRule[]): CompiledProtectedPathRule[] { - const compiledByKey = new Map() - - for (const rule of rules) { - const compiled = compileRule(rule) - compiledByKey.set(`${compiled.protectionMode}:${compiled.normalizedPath}`, compiled) - } - - return [...compiledByKey.values()].sort((a, b) => { - const specificityDiff = b.specificity - a.specificity - if (specificityDiff !== 0) return specificityDiff - - if (a.protectionMode !== b.protectionMode) return a.protectionMode === 'recursive' ? -1 : 1 - return a.path.localeCompare(b.path) - }) -} - -function normalizeGlobPattern(pattern: string): string { - return resolveAbsolutePath(pattern).replaceAll('\\', '/') -} - -function expandProtectedPathRules(rules: readonly ProtectedPathRule[]): ProtectedPathRule[] { - const expandedRules: ProtectedPathRule[] = [] - - for (const rule of rules) { - if (rule.matcher !== 'glob') { - expandedRules.push(createProtectedPathRule(rule.path, rule.protectionMode, rule.reason, rule.source)) - continue - } - - const matchedPaths = glob.sync(normalizeGlobPattern(rule.path), { - onlyFiles: false, - dot: true, - absolute: true, - followSymbolicLinks: false - }) - - for (const matchedPath of matchedPaths) expandedRules.push(createProtectedPathRule(matchedPath, rule.protectionMode, rule.reason, rule.source)) - } - - return expandedRules -} - -function isRuleMatch(targetKey: string, ruleKey: string, protectionMode: ProtectionMode): boolean { - if (protectionMode === 'direct') return isSameOrChildPath(ruleKey, targetKey) - return isSameOrChildPath(targetKey, ruleKey) || isSameOrChildPath(ruleKey, targetKey) -} - -function detectPathProtectionMode(rawPath: string, fallback: ProtectionMode): ProtectionMode { - const absolutePath = resolveAbsolutePath(rawPath) - - try { - if (fs.existsSync(absolutePath) && fs.lstatSync(absolutePath).isDirectory()) return 'recursive' - } - catch {} - - return fallback -} - -function collectBuiltInDangerousPathRules(): ProtectedPathRule[] { - const homeDir = getEffectiveHomeDir() - - return [ - createProtectedPathRule(path.parse(homeDir).root, 'direct', 'built-in dangerous root path', 'built-in-dangerous-root'), - createProtectedPathRule(homeDir, 'direct', 'built-in dangerous home directory', 'built-in-dangerous-root'), - createProtectedPathRule(resolveXdgConfigHome(homeDir), 'direct', 'built-in dangerous config directory', 'built-in-dangerous-root'), - createProtectedPathRule(resolveXdgDataHome(homeDir), 'direct', 'built-in dangerous data directory', 'built-in-dangerous-root'), - createProtectedPathRule(resolveXdgStateHome(homeDir), 'direct', 'built-in dangerous state directory', 'built-in-dangerous-root'), - createProtectedPathRule(resolveXdgCacheHome(homeDir), 'direct', 'built-in dangerous cache directory', 'built-in-dangerous-root'), - createProtectedPathRule(path.join(homeDir, '.aindex'), 'direct', 'built-in global aindex directory', 'built-in-dangerous-root'), - createProtectedPathRule(path.join(homeDir, '.aindex', '.tnmsc.json'), 'direct', 'built-in global config file', 'built-in-config') - ] -} - -function collectWorkspaceReservedRules( - workspaceDir: string, - projectRoots: readonly string[], - includeReservedWorkspaceContentRoots: boolean -): ProtectedPathRule[] { - const rules: ProtectedPathRule[] = [ - createProtectedPathRule(workspaceDir, 'direct', 'workspace root', 'workspace-reserved'), - createProtectedPathRule(path.join(workspaceDir, 'aindex'), 'direct', 'reserved workspace aindex root', 'workspace-reserved'), - createProtectedPathRule(path.join(workspaceDir, 'knowladge'), 'direct', 'reserved workspace knowladge root', 'workspace-reserved') - ] - - for (const projectRoot of projectRoots) rules.push(createProtectedPathRule(projectRoot, 'direct', 'workspace project root', 'workspace-project-root')) - - if (!includeReservedWorkspaceContentRoots) return rules - - rules.push(createProtectedPathRule( - path.join(workspaceDir, 'aindex', 'dist', '**', '*.mdx'), - 'direct', - 'reserved workspace aindex dist mdx files', - 'workspace-reserved', - 'glob' - )) - for (const seriesName of AINDEX_PROJECT_SERIES_NAMES) { - rules.push(createProtectedPathRule( - path.join(workspaceDir, 'aindex', seriesName, '**', '*.mdx'), - 'direct', - `reserved workspace aindex ${seriesName} mdx files`, - 'workspace-reserved', - 'glob' - )) - } - return rules -} - -function collectResolvedAindexRules(aindexDir: string): ProtectedPathRule[] { - return [createProtectedPathRule(aindexDir, 'direct', 'resolved aindex root', 'aindex-root')] -} - -export function collectKnownAindexInputConfigPaths( - aindexDir: string, - resolveOptions?: PublicDefinitionResolveOptions -): string[] { - return collectKnownPublicConfigDefinitionPaths(aindexDir, resolveOptions) -} - -export function collectConfiguredAindexInputRules( - pluginOptions: Required, - aindexDir: string, - resolveOptions?: PublicDefinitionResolveOptions -): ProtectedPathRule[] { - const rules: ProtectedPathRule[] = [] - - for (const key of AINDEX_CONFIG_DIRECTORY_PAIR_KEYS) { - const configuredDir = pluginOptions.aindex[key] - if (configuredDir == null) continue - - rules.push( - createProtectedPathRule( - path.join(aindexDir, configuredDir.src), - 'recursive', - `configured aindex ${key} source directory`, - 'configured-aindex-source' - ) - ) - } - - for (const key of CONFIGURED_AINDEX_FILE_KEYS) { - const configuredFile = pluginOptions.aindex[key] - if (configuredFile == null) continue - - rules.push( - createProtectedPathRule( - path.join(aindexDir, configuredFile.src), - 'direct', - `configured aindex ${key} source file`, - 'configured-aindex-source' - ) - ) - } - - for (const protectedPath of collectKnownAindexInputConfigPaths(aindexDir, resolveOptions)) { - rules.push( - createProtectedPathRule( - protectedPath, - 'direct', - 'known aindex input config file', - 'known-aindex-config' - ) - ) - } - - return rules -} - -export function collectConfiguredAindexInputPaths( - pluginOptions: Required, - aindexDir: string, - resolveOptions?: PublicDefinitionResolveOptions -): string[] { - return collectConfiguredAindexInputRules(pluginOptions, aindexDir, resolveOptions).map(rule => rule.path) -} - -export function collectProtectedInputSourceRules( - collectedOutputContext: OutputCollectedContext -): ProtectedPathRule[] { - const rules: ProtectedPathRule[] = [] - const seen = new Set() - - const addRule = ( - rawPath: string | undefined, - protectionMode: ProtectionMode, - reason: string, - source: string - ): void => { - if (rawPath == null || rawPath.length === 0) return - - const rule = createProtectedPathRule(rawPath, protectionMode, reason, source) - const dedupeKey = `${rule.protectionMode}:${normalizeForComparison(rule.path)}` - if (seen.has(dedupeKey)) return - - seen.add(dedupeKey) - rules.push(rule) - } - - const addRuleFromDir = ( - dir: DirPathLike | undefined, - protectionMode: ProtectionMode, - reason: string, - source: string - ): void => { - const resolved = resolveAbsolutePathFromDir(dir) - if (resolved == null) return - addRule(resolved, protectionMode, reason, source) - } - - addRuleFromDir(collectedOutputContext.globalMemory?.dir as DirPathLike | undefined, 'recursive', 'global memory source directory', 'collected-input-source') - - for (const command of collectedOutputContext.commands ?? []) { - addRuleFromDir(command.dir as DirPathLike | undefined, 'recursive', 'command source directory', 'collected-input-source') - } - - for (const subAgent of collectedOutputContext.subAgents ?? []) { - addRuleFromDir(subAgent.dir as DirPathLike | undefined, 'recursive', 'sub-agent source directory', 'collected-input-source') - } - - for (const rule of collectedOutputContext.rules ?? []) { - addRuleFromDir(rule.dir as DirPathLike | undefined, 'recursive', 'rule source directory', 'collected-input-source') - } - - for (const skill of collectedOutputContext.skills ?? []) { - addRuleFromDir(skill.dir as DirPathLike | undefined, 'recursive', 'skill source directory', 'collected-input-source') - for (const childDoc of skill.childDocs ?? []) { - addRuleFromDir(childDoc.dir as DirPathLike | undefined, 'recursive', 'skill child document directory', 'collected-input-source') - } - for (const resource of skill.resources ?? []) { - if (resource.sourcePath == null || resource.sourcePath.length === 0) continue - addRule( - resource.sourcePath, - detectPathProtectionMode(resource.sourcePath, 'direct'), - 'skill resource source path', - 'collected-input-source' - ) - } - } - - for (const config of collectedOutputContext.vscodeConfigFiles ?? []) { - addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'vscode input config file', 'collected-input-config') - } - - for (const config of collectedOutputContext.zedConfigFiles ?? []) { - addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'zed input config file', 'collected-input-config') - } - - for (const config of collectedOutputContext.jetbrainsConfigFiles ?? []) { - addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'jetbrains input config file', 'collected-input-config') - } - - for (const config of collectedOutputContext.editorConfigFiles ?? []) { - addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'editorconfig input file', 'collected-input-config') - } - - for (const ignoreFile of collectedOutputContext.aiAgentIgnoreConfigFiles ?? []) { - addRule(ignoreFile.sourcePath, 'direct', 'AI agent ignore config file', 'collected-input-config') - } - - if (collectedOutputContext.aindexDir != null) { - for (const protectedPath of collectKnownAindexInputConfigPaths(collectedOutputContext.aindexDir, { - workspaceDir: collectedOutputContext.workspace.directory.path - })) { - addRule(protectedPath, 'direct', 'known aindex input config file', 'known-aindex-config') - } - } - - return rules -} - -export function collectProtectedInputSourcePaths(collectedOutputContext: OutputCollectedContext): string[] { - return collectProtectedInputSourceRules(collectedOutputContext).map(rule => rule.path) -} - -function collectLegacyCompatibilityRules(options: ProtectedDeletionGuardOptions): ProtectedPathRule[] { - const rules: ProtectedPathRule[] = [] - - for (const protectedPath of options.exactProtectedPaths ?? []) { - rules.push(createProtectedPathRule(protectedPath, 'direct', 'legacy direct protected path', 'legacy-direct')) - } - - for (const protectedPath of options.subtreeProtectedPaths ?? []) { - rules.push(createProtectedPathRule(protectedPath, 'recursive', 'legacy recursive protected path', 'legacy-recursive')) - } - - return rules -} - -export function createProtectedDeletionGuard( - options: ProtectedDeletionGuardOptions = {} -): ProtectedDeletionGuard { - const includeReservedWorkspaceContentRoots = options.includeReservedWorkspaceContentRoots ?? true - const rules: ProtectedPathRule[] = [ - ...collectBuiltInDangerousPathRules(), - ...collectLegacyCompatibilityRules(options), - ...options.workspaceDir != null - ? collectWorkspaceReservedRules( - options.workspaceDir, - options.projectRoots ?? [], - includeReservedWorkspaceContentRoots - ) - : [], - ...options.aindexDir != null ? collectResolvedAindexRules(options.aindexDir) : [], - ...options.rules ?? [] - ] - const compiledRules = dedupeAndCompileRules(expandProtectedPathRules(rules)) - - return { - rules: compiledRules.map(rule => ({ - path: rule.path, - protectionMode: rule.protectionMode, - reason: rule.reason, - source: rule.source, - ...rule.matcher != null ? {matcher: rule.matcher} : {} - })), - exactProtectedPaths: compiledRules - .filter(rule => rule.protectionMode === 'direct') - .map(rule => rule.path), - subtreeProtectedPaths: compiledRules - .filter(rule => rule.protectionMode === 'recursive') - .map(rule => rule.path), - compiledRules - } -} - -export function collectProjectRoots(collectedOutputContext: OutputCollectedContext): string[] { - const projectRoots = new Set() - - for (const project of collectedOutputContext.workspace.projects) { - if (project.isWorkspaceRootProject === true) continue - const absolutePath = project.dirFromWorkspacePath?.getAbsolutePath?.() - if (absolutePath != null && absolutePath.length > 0) projectRoots.add(resolveAbsolutePath(absolutePath)) - } - - return [...projectRoots] -} - -function selectMoreSpecificRule( - candidate: CompiledProtectedPathRule, - current: CompiledProtectedPathRule | undefined -): CompiledProtectedPathRule { - if (current == null) return candidate - if (candidate.specificity !== current.specificity) return candidate.specificity > current.specificity ? candidate : current - if (candidate.protectionMode !== current.protectionMode) return candidate.protectionMode === 'recursive' ? candidate : current - return candidate.path.localeCompare(current.path) < 0 ? candidate : current -} - -export function getProtectedPathViolation( - targetPath: string, - guard: ProtectedDeletionGuard -): ProtectedPathViolation | undefined { - const absoluteTargetPath = resolveAbsolutePath(targetPath) - const targetKeys = buildComparisonKeys(absoluteTargetPath) - let matchedRule: CompiledProtectedPathRule | undefined - - for (const rule of guard.compiledRules) { - let didMatch = false - - for (const targetKey of targetKeys) { - for (const ruleKey of rule.comparisonKeys) { - if (!isRuleMatch(targetKey, ruleKey, rule.protectionMode)) continue - matchedRule = selectMoreSpecificRule(rule, matchedRule) - didMatch = true - break - } - - if (didMatch) break - } - } - - if (matchedRule == null) return void 0 - - return { - targetPath: absoluteTargetPath, - protectedPath: matchedRule.path, - protectionMode: matchedRule.protectionMode, - reason: matchedRule.reason, - source: matchedRule.source - } -} - -export function partitionDeletionTargets( - targetPaths: readonly string[], - guard: ProtectedDeletionGuard -): {safePaths: string[], violations: ProtectedPathViolation[]} { - const safePaths: string[] = [] - const violationsByTargetPath = new Map() - - for (const targetPath of targetPaths) { - const absoluteTargetPath = resolveAbsolutePath(targetPath) - const violation = getProtectedPathViolation(absoluteTargetPath, guard) - if (violation == null) { - safePaths.push(absoluteTargetPath) - continue - } - - if (!violationsByTargetPath.has(violation.targetPath)) violationsByTargetPath.set(violation.targetPath, violation) - } - - return { - safePaths, - violations: [...violationsByTargetPath.values()].sort((a, b) => a.targetPath.localeCompare(b.targetPath)) - } -} - -export function buildProtectedDeletionGuardMessage( - operation: string, - violations: readonly ProtectedPathViolation[] -): string { - const pathList = violations.map(violation => violation.targetPath).join(', ') - return `Protected deletion guard blocked ${operation} for ${violations.length} path(s): ${pathList}` -} - -export function logProtectedDeletionGuardError( - logger: ILogger, - operation: string, - violations: readonly ProtectedPathViolation[] -): void { - logger.error(buildProtectedDeletionDiagnostic(operation, violations)) -} diff --git a/cli/src/aindex-config/AindexProjectConfig.ts b/cli/src/aindex-config/AindexProjectConfig.ts deleted file mode 100644 index 82ea42f5..00000000 --- a/cli/src/aindex-config/AindexProjectConfig.ts +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Configuration for empty directory cleanup in aindex projects. - */ -export interface AindexEmptyDirCleanupConfig { - /** Git-style glob patterns to exclude from empty directory cleanup. */ - readonly exclude?: readonly string[] -} - -/** - * Project-level configuration for aindex. - * This is loaded from aindex/aindex.config.ts - */ -export interface AindexProjectConfig { - readonly emptyDirCleanup?: AindexEmptyDirCleanupConfig -} - -export interface AindexProjectConfigLoadResult { - readonly config: AindexProjectConfig - readonly source: string | null - readonly found: boolean -} - -export const DEFAULT_EMPTY_DIR_CLEANUP_CONFIG: AindexEmptyDirCleanupConfig = { - exclude: [] -} - -export function defineAindexProjectConfig(config: AindexProjectConfig): AindexProjectConfig { - return config -} diff --git a/cli/src/aindex-config/AindexProjectConfigLoader.ts b/cli/src/aindex-config/AindexProjectConfigLoader.ts deleted file mode 100644 index b77d388f..00000000 --- a/cli/src/aindex-config/AindexProjectConfigLoader.ts +++ /dev/null @@ -1,88 +0,0 @@ -import type {ILogger} from '@truenine/logger' -import type {AindexProjectConfig, AindexProjectConfigLoadResult} from './AindexProjectConfig' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {createLogger} from '@truenine/logger' - -const CONFIG_FILE_NAMES = ['aindex.config.ts', 'aindex.config.mts', 'aindex.config.cts', 'aindex.config.js', 'aindex.config.mjs', 'aindex.config.cjs'] - -const DEFAULT_CONFIG: AindexProjectConfig = { - emptyDirCleanup: { - exclude: [] - } -} - -export class AindexProjectConfigLoader { - private readonly logger: ILogger - - constructor() { - this.logger = createLogger('AindexProjectConfigLoader') - } - - async loadFromDirectory(dirPath: string): Promise { - for (const configName of CONFIG_FILE_NAMES) { - const configPath = path.join(dirPath, configName) - if (fs.existsSync(configPath)) { - return this.loadFromFile(configPath) - } - } - return {config: DEFAULT_CONFIG, source: null, found: false} - } - - async loadFromFile(filePath: string): Promise { - try { - const resolvedPath = path.resolve(filePath) - - if (!fs.existsSync(resolvedPath)) { - return {config: DEFAULT_CONFIG, source: null, found: false} - } - - const mod = (await import(resolvedPath)) as Record - const rawConfig = mod != null && typeof mod === 'object' ? 'default' in mod ? mod['default'] : 'config' in mod ? mod['config'] : mod : mod - - const config = this.normalizeConfig(rawConfig) - this.logger.debug('aindex project config loaded', {source: resolvedPath}) - return {config, source: resolvedPath, found: true} - } catch (error) { - this.logger.warn({ - code: 'AINDEX_CONFIG_LOAD_FAILED', - title: 'aindex project config load failed', - rootCause: [error instanceof Error ? error.message : String(error)], - details: {path: filePath} - }) - return {config: DEFAULT_CONFIG, source: null, found: false} - } - } - - private normalizeConfig(raw: unknown): AindexProjectConfig { - if (raw == null || typeof raw !== 'object') return DEFAULT_CONFIG - const obj = raw as Record - - const edc = obj['emptyDirCleanup'] - if (edc != null && typeof edc !== 'object') return {} - - const edcObj = edc as Record - return { - emptyDirCleanup: { - exclude: toStringArray(edcObj['exclude']) - } - } - } -} - -function toStringArray(val: unknown): string[] { - if (Array.isArray(val)) return val.filter((x): x is string => typeof x === 'string') - if (typeof val === 'string') return [val] - return [] -} - -let defaultLoader: AindexProjectConfigLoader | null = null - -export function getAindexProjectConfigLoader(): AindexProjectConfigLoader { - defaultLoader ??= new AindexProjectConfigLoader() - return defaultLoader -} - -export async function loadAindexProjectConfig(dirPath: string): Promise { - return getAindexProjectConfigLoader().loadFromDirectory(dirPath) -} diff --git a/cli/src/aindex-config/index.ts b/cli/src/aindex-config/index.ts deleted file mode 100644 index 9489c4fc..00000000 --- a/cli/src/aindex-config/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from './AindexProjectConfig' -export * from './AindexProjectConfigLoader' diff --git a/cli/src/aindex-project-series.ts b/cli/src/aindex-project-series.ts deleted file mode 100644 index 0cfa3ddf..00000000 --- a/cli/src/aindex-project-series.ts +++ /dev/null @@ -1,72 +0,0 @@ -import type {AindexProjectSeriesName, PluginOptions} from '@/plugins/plugin-core' -import {AINDEX_PROJECT_SERIES_NAMES} from '@/plugins/plugin-core' - -export interface AindexProjectSeriesConfig { - readonly name: AindexProjectSeriesName - readonly src: string - readonly dist: string -} - -export interface AindexProjectSeriesProjectRef { - readonly projectName: string - readonly seriesName: AindexProjectSeriesName - readonly seriesDir: string -} - -export interface AindexProjectSeriesProjectNameConflict { - readonly projectName: string - readonly refs: readonly AindexProjectSeriesProjectRef[] -} - -type AindexProjectSeriesOptions = Required['aindex'] - -export function isAindexProjectSeriesName(value: string): value is AindexProjectSeriesName { - return AINDEX_PROJECT_SERIES_NAMES.includes(value as AindexProjectSeriesName) -} - -export function resolveAindexProjectSeriesConfigs( - options: Required -): readonly AindexProjectSeriesConfig[] { - return AINDEX_PROJECT_SERIES_NAMES.map(name => buildAindexProjectSeriesConfig(options.aindex, name)) -} - -export function resolveAindexProjectSeriesConfig( - options: Required, - seriesName: AindexProjectSeriesName -): AindexProjectSeriesConfig { - return buildAindexProjectSeriesConfig(options.aindex, seriesName) -} - -export function collectAindexProjectSeriesProjectNameConflicts( - refs: readonly AindexProjectSeriesProjectRef[] -): readonly AindexProjectSeriesProjectNameConflict[] { - const refsByProjectName = new Map() - - for (const ref of refs) { - const existingRefs = refsByProjectName.get(ref.projectName) - if (existingRefs == null) refsByProjectName.set(ref.projectName, [ref]) - else existingRefs.push(ref) - } - - return Array.from(refsByProjectName.entries(), ([projectName, projectRefs]) => ({ - projectName, - refs: [...projectRefs] - .sort((left, right) => left.seriesName.localeCompare(right.seriesName)) - })) - .filter(conflict => { - const uniqueSeriesNames = new Set(conflict.refs.map(ref => ref.seriesName)) - return uniqueSeriesNames.size > 1 - }) - .sort((left, right) => left.projectName.localeCompare(right.projectName)) -} - -function buildAindexProjectSeriesConfig( - aindexOptions: AindexProjectSeriesOptions, - seriesName: AindexProjectSeriesName -): AindexProjectSeriesConfig { - return { - name: seriesName, - src: aindexOptions[seriesName].src, - dist: aindexOptions[seriesName].dist - } -} diff --git a/cli/src/bridge/mod.rs b/cli/src/bridge/mod.rs deleted file mode 100644 index ab8b3f48..00000000 --- a/cli/src/bridge/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -//! Node.js bridge — spawns Node.js child process for plugin runtime commands. - -pub mod node; diff --git a/cli/src/bridge/node.rs b/cli/src/bridge/node.rs deleted file mode 100644 index 01e9e7c0..00000000 --- a/cli/src/bridge/node.rs +++ /dev/null @@ -1,549 +0,0 @@ -//! Node.js process spawning for plugin runtime commands. -//! -//! Locates the bundled JS entry point and spawns `node` to execute -//! plugin-dependent commands (execute, dry-run, clean, plugins). - -use std::path::{Path, PathBuf}; -use std::process::{Command, ExitCode, Stdio}; -use std::sync::{Mutex, OnceLock}; - -use crate::{ - BridgeCommandResult, CliError, - diagnostic_helpers::{diagnostic, line, optional_details}, -}; - -use serde_json::Value; -use tnmsc_logger::create_logger; - -/// Strip Windows extended-length path prefix (`\\?\`) which Node.js cannot handle. -fn strip_win_prefix(path: PathBuf) -> PathBuf { - let s = path.to_string_lossy(); - if let Some(stripped) = s.strip_prefix(r"\\?\") { - PathBuf::from(stripped) - } else { - path - } -} - -const PACKAGE_NAME: &str = "@truenine/memory-sync-cli"; -static PLUGIN_RUNTIME_CACHE: OnceLock>> = OnceLock::new(); -static NODE_CACHE: OnceLock>> = OnceLock::new(); - -fn read_cached_success(cache: &Mutex>) -> Option { - match cache.lock() { - Ok(guard) => guard.clone(), - Err(poisoned) => poisoned.into_inner().clone(), - } -} - -fn store_cached_success(cache: &Mutex>, value: &T) { - match cache.lock() { - Ok(mut guard) => { - *guard = Some(value.clone()); - } - Err(poisoned) => { - *poisoned.into_inner() = Some(value.clone()); - } - } -} - -fn detect_with_cached_success(cache: &Mutex>, detect: F) -> Option -where - F: FnOnce() -> Option, -{ - if let Some(cached) = read_cached_success(cache) { - return Some(cached); - } - - let detected = detect(); - if let Some(value) = detected.as_ref() { - store_cached_success(cache, value); - } - detected -} - -/// Locate the plugin runtime JS entry point. -/// -/// Search order: -/// 1. `/plugin-runtime.mjs` (release archive: binary + JS co-located) -/// 2. `/../dist/plugin-runtime.mjs` (dev mode: cli/dist/) -/// 3. `/../cli/dist/plugin-runtime.mjs` (dev mode from repo root) -/// 4. `/dist/plugin-runtime.mjs` (fallback) -/// 5. `/cli/dist/plugin-runtime.mjs` (fallback from repo root cwd) -/// 6. npm/pnpm global install: `/@truenine/memory-sync-cli/dist/plugin-runtime.mjs` -/// 7. Embedded JS extracted to `~/.aindex/.cache/plugin-runtime-.mjs` -pub(crate) fn find_plugin_runtime() -> Option { - let cache = PLUGIN_RUNTIME_CACHE.get_or_init(|| Mutex::new(None)); - detect_with_cached_success(cache, detect_plugin_runtime) -} - -fn detect_plugin_runtime() -> Option { - let mut candidates: Vec = Vec::new(); - - // Relative to binary location - if let Ok(exe) = std::env::current_exe() - && let Some(exe_dir) = exe.parent() - { - candidates.push(exe_dir.join("plugin-runtime.mjs")); - candidates.push(exe_dir.join("../dist/plugin-runtime.mjs")); - candidates.push(exe_dir.join("../cli/dist/plugin-runtime.mjs")); - } - - // Relative to CWD - if let Ok(cwd) = std::env::current_dir() { - candidates.push(cwd.join("dist/plugin-runtime.mjs")); - candidates.push(cwd.join("cli/dist/plugin-runtime.mjs")); - } - - // npm/pnpm global package locations - for global_root in find_npm_global_roots() { - candidates.push( - global_root - .join(PACKAGE_NAME) - .join("dist/plugin-runtime.mjs"), - ); - } - - for candidate in &candidates { - let normalized = candidate - .canonicalize() - .ok() - .unwrap_or_else(|| candidate.clone()); - if normalized.exists() { - return Some(strip_win_prefix(normalized)); - } - } - - // Last resort: extract embedded JS to cache - extract_embedded_runtime() -} - -/// Find pnpm/npm global node_modules roots. -fn find_npm_global_roots() -> Vec { - let mut roots = Vec::new(); - - // `pnpm root -g` output (preferred) - if let Some(path) = run_silent("pnpm", &["root", "-g"]) { - roots.push(PathBuf::from(path)); - } - - // `npm root -g` output - if let Some(path) = run_silent("npm", &["root", "-g"]) { - roots.push(PathBuf::from(path)); - } - - // Common fallback locations (pnpm first) - if let Some(home) = dirs::home_dir() { - roots.push(home.join("AppData/Local/pnpm/global/5/node_modules")); - roots.push(home.join("AppData/Local/pnpm/global/node_modules")); - roots.push(home.join(".local/share/pnpm/global/5/node_modules")); - roots.push(home.join(".local/share/pnpm/global/node_modules")); - roots.push(home.join("AppData/Roaming/npm/node_modules")); - roots.push(home.join(".npm-global/lib/node_modules")); - } - - // nvm-managed node paths - #[cfg(not(windows))] - if let Some(home) = dirs::home_dir() { - let nvm_dir = home.join(".nvm/versions/node"); - if let Ok(entries) = std::fs::read_dir(&nvm_dir) { - for entry in entries.flatten() { - roots.push(entry.path().join("lib/node_modules")); - } - } - } - - roots -} - -/// Run a command silently and return trimmed stdout. -fn run_silent(cmd: &str, args: &[&str]) -> Option { - Command::new(cmd) - .args(args) - .stdout(Stdio::piped()) - .stderr(Stdio::null()) - .output() - .ok() - .and_then(|o| { - if o.status.success() { - String::from_utf8(o.stdout) - .ok() - .map(|s| s.trim().to_string()) - } else { - None - } - }) - .filter(|s| !s.is_empty()) -} - -/// Embedded plugin-runtime.mjs content (set by build.rs, empty if not available). -/// This allows the standalone binary to work without an external JS file. -#[cfg(feature = "embedded-runtime")] -const EMBEDDED_RUNTIME: &str = include_str!(concat!(env!("OUT_DIR"), "/plugin-runtime.mjs")); - -/// Extract embedded JS to `~/.aindex/.cache/plugin-runtime-.mjs`. -#[cfg(not(feature = "embedded-runtime"))] -fn extract_embedded_runtime() -> Option { - None -} - -/// Extract embedded JS to `~/.aindex/.cache/plugin-runtime-.mjs`. -#[cfg(feature = "embedded-runtime")] -fn extract_embedded_runtime() -> Option { - let version = env!("CARGO_PKG_VERSION"); - let cache_dir = dirs::home_dir()?.join(".aindex/.cache"); - let cache_file = cache_dir.join(format!("plugin-runtime-{version}.mjs")); - - // Already extracted and up-to-date - if cache_file.exists() { - return Some(cache_file); - } - - // Extract - std::fs::create_dir_all(&cache_dir).ok()?; - std::fs::write(&cache_file, EMBEDDED_RUNTIME).ok()?; - Some(cache_file) -} - -/// Find the `node` executable. -pub(crate) fn find_node() -> Option { - let cache = NODE_CACHE.get_or_init(|| Mutex::new(None)); - detect_with_cached_success(cache, detect_node) -} - -fn detect_node() -> Option { - // Try `node` in PATH - if Command::new("node") - .arg("--version") - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .status() - .is_ok() - { - return Some("node".to_string()); - } - None -} - -/// Run a Node.js plugin runtime command. -/// -/// Spawns: `node [--json] [extra_args...]` -/// Inherits stdin/stdout/stderr so the Node.js process output goes directly to terminal. -pub fn run_node_command(subcommand: &str, json_mode: bool, extra_args: &[&str]) -> ExitCode { - let logger = create_logger("NodeBridge", None); - - // Find node - let node = match find_node() { - Some(n) => n, - None => { - logger.error(diagnostic( - "NODE_RUNTIME_NOT_FOUND", - "Node.js runtime is required", - line("The `node` executable was not found in PATH."), - Some(line( - "Install Node.js and reopen this shell so `node --version` succeeds.", - )), - Some(vec![line( - "If Node.js is already installed, add its install directory to PATH.", - )]), - optional_details(serde_json::json!({ "subcommand": subcommand })), - )); - return ExitCode::FAILURE; - } - }; - - // Find plugin runtime - let runtime_path = match find_plugin_runtime() { - Some(p) => p, - None => { - logger.error(diagnostic( - "PLUGIN_RUNTIME_NOT_FOUND", - "Plugin runtime entry is missing", - line("No `plugin-runtime.mjs` file was found in the expected locations."), - Some(line( - "Build or install `@truenine/memory-sync-cli` so `plugin-runtime.mjs` is available.", - )), - Some(vec![line( - "Run `pnpm -F @truenine/memory-sync-cli build` in the repository.", - )]), - optional_details(serde_json::json!({ "subcommand": subcommand })), - )); - logger.debug( - Value::String("Searched: binary dir, CWD, npm/pnpm global, embedded cache".into()), - None, - ); - return ExitCode::FAILURE; - } - }; - - logger.debug( - Value::String("spawning node process".into()), - Some(serde_json::json!({ - "node": &node, - "runtime": runtime_path.to_string_lossy(), - "subcommand": subcommand, - "json": json_mode - })), - ); - - let mut cmd = Command::new(&node); - cmd.arg(&runtime_path); - cmd.arg(subcommand); - - if json_mode { - cmd.arg("--json"); - } - - for arg in extra_args { - cmd.arg(arg); - } - - // Inherit stdio so Node.js output goes directly to terminal - cmd.stdin(Stdio::inherit()); - cmd.stdout(Stdio::inherit()); - cmd.stderr(Stdio::inherit()); - - match cmd.status() { - Ok(status) => { - if status.success() { - ExitCode::SUCCESS - } else { - ExitCode::from(status.code().unwrap_or(1) as u8) - } - } - Err(e) => { - logger.error(diagnostic( - "NODE_PROCESS_SPAWN_FAILED", - "Failed to start the Node.js subprocess", - line("The CLI could not spawn the `node` process."), - Some(line( - "Check that `node` is runnable in this shell and retry.", - )), - None, - optional_details(serde_json::json!({ - "subcommand": subcommand, - "error": e.to_string() - })), - )); - ExitCode::FAILURE - } - } -} - -/// Library mode: capture Node.js subprocess output and return structured result. -/// -/// Used by GUI backend and other Rust callers via [`crate::run_bridge_command`]. -/// Unlike [`run_node_command`] which inherits stdio for CLI terminal use, -/// this variant pipes stdout/stderr so the caller can inspect the output. -pub fn run_node_command_captured( - subcommand: &str, - cwd: &Path, - json_mode: bool, - extra_args: &[&str], -) -> Result { - let node = find_node().ok_or(CliError::NodeNotFound)?; - let runtime_path = find_plugin_runtime() - .ok_or_else(|| CliError::PluginRuntimeNotFound( - "plugin-runtime.mjs not found. Install via 'pnpm add -g @truenine/memory-sync-cli' or place plugin-runtime.mjs next to the binary.".into(), - ))?; - - let mut cmd = Command::new(&node); - cmd.arg(&runtime_path); - cmd.arg(subcommand); - - if json_mode { - cmd.arg("--json"); - } - - for arg in extra_args { - cmd.arg(arg); - } - - cmd.current_dir(cwd); - cmd.stdout(Stdio::piped()); - cmd.stderr(Stdio::piped()); - - let output = cmd.output()?; - - let exit_code = output.status.code().unwrap_or(-1); - let stdout = String::from_utf8_lossy(&output.stdout).to_string(); - let stderr = String::from_utf8_lossy(&output.stderr).to_string(); - - if output.status.success() || (json_mode && !stdout.trim().is_empty()) { - Ok(BridgeCommandResult { - stdout, - stderr, - exit_code, - }) - } else { - Err(CliError::NodeProcessFailed { - code: exit_code, - stderr, - }) - } -} - -/// Run the fallback: spawn `node ` with full process.argv passthrough. -/// Used when plugin-runtime.mjs is not available but index.mjs is. -#[allow(dead_code)] -pub fn run_node_fallback(args: &[String]) -> ExitCode { - let logger = create_logger("NodeBridge", None); - - let node = match find_node() { - Some(n) => n, - None => { - logger.error(diagnostic( - "NODE_RUNTIME_NOT_FOUND", - "Node.js runtime is required", - line("The `node` executable was not found in PATH."), - Some(line( - "Install Node.js and reopen this shell so `node --version` succeeds.", - )), - Some(vec![line( - "If Node.js is already installed, add its install directory to PATH.", - )]), - optional_details(serde_json::json!({ "args": args })), - )); - return ExitCode::FAILURE; - } - }; - - // Find index.mjs (the existing TS CLI entry) - let index_path = find_index_mjs(); - let runtime = match index_path { - Some(p) => p, - None => { - logger.error(diagnostic( - "CLI_ENTRY_NOT_FOUND", - "CLI JavaScript entry is missing", - line("No `index.mjs` entry point was found for the fallback Node.js launcher."), - Some(line( - "Build `@truenine/memory-sync-cli` before running the fallback launcher.", - )), - Some(vec![line( - "Run `pnpm -F @truenine/memory-sync-cli build` in the repository.", - )]), - optional_details(serde_json::json!({ "args": args })), - )); - return ExitCode::FAILURE; - } - }; - - let mut cmd = Command::new(&node); - cmd.arg(&runtime); - for arg in args { - cmd.arg(arg); - } - cmd.stdin(Stdio::inherit()); - cmd.stdout(Stdio::inherit()); - cmd.stderr(Stdio::inherit()); - - match cmd.status() { - Ok(status) => { - if status.success() { - ExitCode::SUCCESS - } else { - ExitCode::from(status.code().unwrap_or(1) as u8) - } - } - Err(e) => { - logger.error(diagnostic( - "NODE_PROCESS_SPAWN_FAILED", - "Failed to start the Node.js subprocess", - line("The CLI could not spawn the `node` process."), - Some(line( - "Check that `node` is runnable in this shell and retry.", - )), - None, - optional_details(serde_json::json!({ - "args": args, - "error": e.to_string() - })), - )); - ExitCode::FAILURE - } - } -} - -#[allow(dead_code)] -fn find_index_mjs() -> Option { - let candidates: Vec = { - let mut c = Vec::new(); - if let Ok(exe) = std::env::current_exe() - && let Some(exe_dir) = exe.parent() - { - c.push(exe_dir.join("index.mjs")); - c.push(exe_dir.join("../dist/index.mjs")); - c.push(exe_dir.join("../cli/dist/index.mjs")); - } - if let Ok(cwd) = std::env::current_dir() { - c.push(cwd.join("dist/index.mjs")); - c.push(cwd.join("cli/dist/index.mjs")); - } - c - }; - - for candidate in &candidates { - let normalized = candidate - .canonicalize() - .ok() - .unwrap_or_else(|| candidate.clone()); - if normalized.exists() { - return Some(strip_win_prefix(normalized)); - } - } - None -} - -#[cfg(test)] -mod tests { - use super::*; - use std::cell::Cell; - use std::sync::Mutex; - - #[test] - fn test_strip_win_prefix_with_prefix() { - let path = PathBuf::from(r"\\?\C:\Users\test\file.mjs"); - let result = strip_win_prefix(path); - assert_eq!(result, PathBuf::from(r"C:\Users\test\file.mjs")); - } - - #[test] - fn test_strip_win_prefix_without_prefix() { - let path = PathBuf::from(r"C:\Users\test\file.mjs"); - let result = strip_win_prefix(path.clone()); - assert_eq!(result, path); - } - - #[test] - fn test_strip_win_prefix_unix_path() { - let path = PathBuf::from("/home/user/file.mjs"); - let result = strip_win_prefix(path.clone()); - assert_eq!(result, path); - } - - #[test] - fn test_detect_with_cached_success_retries_until_success() { - let cache = Mutex::new(None); - let attempts = Cell::new(0); - - let first = detect_with_cached_success(&cache, || { - attempts.set(attempts.get() + 1); - Option::::None - }); - assert_eq!(first, None); - - let second = detect_with_cached_success(&cache, || { - attempts.set(attempts.get() + 1); - Some(String::from("node")) - }); - assert_eq!(second, Some(String::from("node"))); - - let third = detect_with_cached_success(&cache, || { - attempts.set(attempts.get() + 1); - Some(String::from("other")) - }); - assert_eq!(third, Some(String::from("node"))); - assert_eq!(attempts.get(), 2); - } -} diff --git a/cli/src/cleanup/delete-targets.ts b/cli/src/cleanup/delete-targets.ts deleted file mode 100644 index 4ed5c39e..00000000 --- a/cli/src/cleanup/delete-targets.ts +++ /dev/null @@ -1,71 +0,0 @@ -import * as path from 'node:path' -import {resolveAbsolutePath} from '../ProtectedDeletionGuard' - -export interface CompactedDeletionTargets { - readonly files: string[] - readonly dirs: string[] -} - -function stripTrailingSeparator(rawPath: string): string { - const {root} = path.parse(rawPath) - if (rawPath === root) return rawPath - return rawPath.endsWith(path.sep) ? rawPath.slice(0, -1) : rawPath -} - -export function isSameOrChildDeletionPath(candidate: string, parent: string): boolean { - const normalizedCandidate = stripTrailingSeparator(candidate) - const normalizedParent = stripTrailingSeparator(parent) - if (normalizedCandidate === normalizedParent) return true - return normalizedCandidate.startsWith(`${normalizedParent}${path.sep}`) -} - -export function compactDeletionTargets( - files: readonly string[], - dirs: readonly string[] -): CompactedDeletionTargets { - const filesByKey = new Map() - const dirsByKey = new Map() - - for (const filePath of files) { - const resolvedPath = resolveAbsolutePath(filePath) - filesByKey.set(resolvedPath, resolvedPath) - } - - for (const dirPath of dirs) { - const resolvedPath = resolveAbsolutePath(dirPath) - dirsByKey.set(resolvedPath, resolvedPath) - } - - const compactedDirs = new Map() - const sortedDirEntries = [...dirsByKey.entries()].sort((a, b) => a[0].length - b[0].length) - - for (const [dirKey, dirPath] of sortedDirEntries) { - let coveredByParent = false - for (const existingParentKey of compactedDirs.keys()) { - if (isSameOrChildDeletionPath(dirKey, existingParentKey)) { - coveredByParent = true - break - } - } - - if (!coveredByParent) compactedDirs.set(dirKey, dirPath) - } - - const compactedFiles: string[] = [] - for (const [fileKey, filePath] of filesByKey) { - let coveredByDir = false - for (const dirKey of compactedDirs.keys()) { - if (isSameOrChildDeletionPath(fileKey, dirKey)) { - coveredByDir = true - break - } - } - - if (!coveredByDir) compactedFiles.push(filePath) - } - - compactedFiles.sort((a, b) => a.localeCompare(b)) - const compactedDirPaths = [...compactedDirs.values()].sort((a, b) => a.localeCompare(b)) - - return {files: compactedFiles, dirs: compactedDirPaths} -} diff --git a/cli/src/cleanup/empty-directories.ts b/cli/src/cleanup/empty-directories.ts deleted file mode 100644 index 5ea8a881..00000000 --- a/cli/src/cleanup/empty-directories.ts +++ /dev/null @@ -1,114 +0,0 @@ -import type * as fs from 'node:fs' -import {resolveAbsolutePath} from '../ProtectedDeletionGuard' - -const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES = new Set([ - '.git', - 'node_modules', - 'dist', - 'target', - '.next', - '.turbo', - 'coverage', - '.nyc_output', - '.cache', - '.vite', - '.vite-temp', - '.pnpm-store', - '.yarn', - '.idea', - '.volumes', - 'volumes' -]) - -export interface WorkspaceEmptyDirectoryPlan { - readonly emptyDirsToDelete: string[] -} - -export interface WorkspaceEmptyDirectoryPlannerOptions { - readonly fs: typeof import('node:fs') - readonly path: typeof import('node:path') - readonly workspaceDir: string - readonly filesToDelete: readonly string[] - readonly dirsToDelete: readonly string[] -} - -function shouldSkipEmptyDirectoryTree( - nodePath: typeof import('node:path'), - workspaceDir: string, - currentDir: string -): boolean { - if (currentDir === workspaceDir) return false - return EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.has( - nodePath.basename(currentDir) - ) -} - -export function planWorkspaceEmptyDirectoryCleanup( - options: WorkspaceEmptyDirectoryPlannerOptions -): WorkspaceEmptyDirectoryPlan { - const workspaceDir = resolveAbsolutePath(options.workspaceDir) - const filesToDelete = new Set(options.filesToDelete.map(resolveAbsolutePath)) - const dirsToDelete = new Set(options.dirsToDelete.map(resolveAbsolutePath)) - const emptyDirsToDelete = new Set() - - // Track which directories are scheduled for deletion (dirsToDelete + emptyDirsToDelete) - const isScheduledForDeletion = (dirPath: string): boolean => dirsToDelete.has(dirPath) || emptyDirsToDelete.has(dirPath) - - const collectEmptyDirectories = (currentDir: string): boolean => { - if (isScheduledForDeletion(currentDir)) return true - if (shouldSkipEmptyDirectoryTree(options.path, workspaceDir, currentDir)) - { return false } - - let entries: fs.Dirent[] - try { - entries = options.fs.readdirSync(currentDir, {withFileTypes: true}) - } catch { - return false - } - - let hasRetainedEntries = false - - for (const entry of entries) { - const entryPath = resolveAbsolutePath( - options.path.join(currentDir, entry.name) - ) - - if (isScheduledForDeletion(entryPath)) continue - - if (entry.isDirectory()) { - if ( - shouldSkipEmptyDirectoryTree(options.path, workspaceDir, entryPath) - ) { - hasRetainedEntries = true - continue - } - - if (collectEmptyDirectories(entryPath)) { - emptyDirsToDelete.add(entryPath) - continue - } - - hasRetainedEntries = true - continue - } - - if (filesToDelete.has(entryPath)) continue - hasRetainedEntries = true - } - - return !hasRetainedEntries - } - - // Iteratively collect empty directories until no new ones are found - // This handles the case where deleting a child directory makes its parent empty - let previousSize = -1 - while (emptyDirsToDelete.size !== previousSize) { - previousSize = emptyDirsToDelete.size - collectEmptyDirectories(workspaceDir) - } - - return { - emptyDirsToDelete: [...emptyDirsToDelete].sort((a, b) => - a.localeCompare(b)) - } -} diff --git a/cli/src/cli-runtime.test.ts b/cli/src/cli-runtime.test.ts deleted file mode 100644 index ab877f20..00000000 --- a/cli/src/cli-runtime.test.ts +++ /dev/null @@ -1,67 +0,0 @@ -import {afterEach, describe, expect, it, vi} from 'vitest' - -const { - createDefaultPluginConfigMock, - pipelineRunMock, - pluginPipelineCtorMock -} = vi.hoisted(() => ({ - createDefaultPluginConfigMock: vi.fn(), - pipelineRunMock: vi.fn(), - pluginPipelineCtorMock: vi.fn() -})) - -vi.mock('./plugin.config', () => ({ - createDefaultPluginConfig: createDefaultPluginConfigMock -})) - -vi.mock('./PluginPipeline', () => ({ - PluginPipeline: function MockPluginPipeline(...args: unknown[]) { - pluginPipelineCtorMock(...args) - return { - run: pipelineRunMock - } - } -})) - -afterEach(() => { - vi.clearAllMocks() - vi.resetModules() -}) - -describe('cli runtime lightweight commands', () => { - it('does not load plugin config for --version', async () => { - const {runCli} = await import('./cli-runtime') - - const exitCode = await runCli(['node', 'tnmsc', '--version']) - - expect(exitCode).toBe(0) - expect(createDefaultPluginConfigMock).not.toHaveBeenCalled() - expect(pluginPipelineCtorMock).not.toHaveBeenCalled() - expect(pipelineRunMock).not.toHaveBeenCalled() - }) - - it('emits JSON for --version --json without loading plugin config', async () => { - const {runCli} = await import('./cli-runtime') - const writeSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) - - try { - const exitCode = await runCli(['node', 'tnmsc', '--version', '--json']) - - expect(exitCode).toBe(0) - expect(createDefaultPluginConfigMock).not.toHaveBeenCalled() - expect(pluginPipelineCtorMock).not.toHaveBeenCalled() - expect(pipelineRunMock).not.toHaveBeenCalled() - - const payload = JSON.parse(String(writeSpy.mock.calls[0]?.[0])) as { - readonly success: boolean - readonly message?: string - } - - expect(payload.success).toBe(true) - expect(payload.message).toBe('Version displayed') - } - finally { - writeSpy.mockRestore() - } - }) -}) diff --git a/cli/src/cli-runtime.ts b/cli/src/cli-runtime.ts deleted file mode 100644 index 213b8bdf..00000000 --- a/cli/src/cli-runtime.ts +++ /dev/null @@ -1,106 +0,0 @@ -import type {Command, CommandContext, CommandResult} from '@/commands/Command' -import * as path from 'node:path' -import process from 'node:process' -import {JsonOutputCommand, toJsonCommandResult} from '@/commands/JsonOutputCommand' -import {buildUnhandledExceptionDiagnostic} from '@/diagnostics' -import {PluginPipeline} from '@/PluginPipeline' -import {mergeConfig} from './config' -import {extractUserArgs, parseArgs, resolveCommand} from './pipeline/CliArgumentParser' -import {createDefaultPluginConfig} from './plugin.config' -import {createLogger, drainBufferedDiagnostics, FilePathKind, setGlobalLogLevel} from './plugins/plugin-core' - -const LIGHTWEIGHT_COMMAND_NAMES = new Set(['help', 'version', 'unknown']) - -export function isJsonMode(argv: readonly string[]): boolean { - return argv.some(arg => arg === '--json' || arg === '-j' || /^-[^-]*j/.test(arg)) -} - -function writeJsonFailure(error: unknown): void { - const errorMessage = error instanceof Error ? error.message : String(error) - const logger = createLogger('main', 'silent') - logger.error(buildUnhandledExceptionDiagnostic('main', error)) - process.stdout.write(`${JSON.stringify(toJsonCommandResult({ - success: false, - filesAffected: 0, - dirsAffected: 0, - message: errorMessage - }, drainBufferedDiagnostics()))}\n`) -} - -function createUnavailableContext(kind: 'cleanup' | 'write'): never { - throw new Error(`${kind} context is unavailable for lightweight commands`) -} - -function createLightweightCommandContext(logLevel: ReturnType['logLevel']): CommandContext { - const workspaceDir = process.cwd() - const userConfigOptions = mergeConfig({ - workspaceDir, - ...logLevel != null ? {logLevel} : {} - }) - - return { - logger: createLogger('PluginPipeline', logLevel), - outputPlugins: [], - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir) - }, - projects: [] - } - }, - userConfigOptions, - createCleanContext: () => createUnavailableContext('cleanup'), - createWriteContext: () => createUnavailableContext('write') - } -} - -function resolveLightweightCommand(argv: readonly string[]): { - readonly command: Command - readonly context: CommandContext -} | undefined { - const filteredArgs = argv.filter((arg): arg is string => arg != null) - const parsedArgs = parseArgs(extractUserArgs(filteredArgs)) - let command: Command = resolveCommand(parsedArgs) - - if (!LIGHTWEIGHT_COMMAND_NAMES.has(command.name)) return void 0 - - if (parsedArgs.logLevel != null) setGlobalLogLevel(parsedArgs.logLevel) - - if (parsedArgs.jsonFlag) { - setGlobalLogLevel('silent') - command = new JsonOutputCommand(command) - } - - return { - command, - context: createLightweightCommandContext(parsedArgs.logLevel) - } -} - -export async function runCli(argv: readonly string[] = process.argv): Promise { - try { - const lightweightCommand = resolveLightweightCommand(argv) - if (lightweightCommand != null) { - const result: CommandResult = await lightweightCommand.command.execute(lightweightCommand.context) - return result.success ? 0 : 1 - } - - const pipeline = new PluginPipeline(...argv) - const userPluginConfig = await createDefaultPluginConfig(argv) - const result = await pipeline.run(userPluginConfig) - return result.success ? 0 : 1 - } - catch (error) { - if (isJsonMode(argv)) { - writeJsonFailure(error) - return 1 - } - - const logger = createLogger('main', 'error') - logger.error(buildUnhandledExceptionDiagnostic('main', error)) - return 1 - } -} diff --git a/cli/src/cli.rs b/cli/src/cli.rs index ab9c5815..81597a02 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -213,7 +213,6 @@ pub fn resolve_command(cli: &Cli) -> ResolvedCommand { } else { let pairs = parse_key_value_pairs(args); if pairs.is_empty() { - // No key=value pairs and no --show: default to execute ResolvedCommand::Execute } else { ResolvedCommand::Config(pairs) @@ -223,114 +222,3 @@ pub fn resolve_command(cli: &Cli) -> ResolvedCommand { Some(CliCommand::Plugins) => ResolvedCommand::Plugins, } } - -#[cfg(test)] -mod tests { - use super::*; - - fn parse(args: &[&str]) -> Cli { - Cli::try_parse_from(args).unwrap() - } - - #[test] - fn test_no_args_defaults_to_execute() { - let cli = parse(&["tnmsc"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::Execute); - } - - #[test] - fn test_help_subcommand() { - let cli = parse(&["tnmsc", "help"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::Help); - } - - #[test] - fn test_version_subcommand() { - let cli = parse(&["tnmsc", "version"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::Version); - } - - #[test] - fn test_dry_run_subcommand() { - let cli = parse(&["tnmsc", "dry-run"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::DryRun); - } - - #[test] - fn test_clean_subcommand() { - let cli = parse(&["tnmsc", "clean"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::Clean); - } - - #[test] - fn test_clean_dry_run() { - let cli = parse(&["tnmsc", "clean", "--dry-run"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::DryRunClean); - } - - #[test] - fn test_clean_short_dry_run() { - let cli = parse(&["tnmsc", "clean", "-n"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::DryRunClean); - } - - #[test] - fn test_config_show() { - let cli = parse(&["tnmsc", "config", "--show"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::ConfigShow); - } - - #[test] - fn test_config_set() { - let cli = parse(&["tnmsc", "config", "workspaceDir=~/my-project"]); - assert_eq!( - resolve_command(&cli), - ResolvedCommand::Config(vec![("workspaceDir".into(), "~/my-project".into())]) - ); - } - - #[test] - fn test_config_set_flag() { - let cli = parse(&["tnmsc", "config", "--set", "logLevel=debug"]); - assert_eq!( - resolve_command(&cli), - ResolvedCommand::Config(vec![("logLevel".into(), "debug".into())]) - ); - } - - #[test] - fn test_plugins_subcommand() { - let cli = parse(&["tnmsc", "plugins"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::Plugins); - } - - #[test] - fn test_json_flag() { - let cli = parse(&["tnmsc", "--json"]); - assert!(cli.json); - } - - #[test] - fn test_json_short_flag() { - let cli = parse(&["tnmsc", "-j"]); - assert!(cli.json); - } - - #[test] - fn test_log_level_trace() { - let cli = parse(&["tnmsc", "--trace"]); - assert_eq!(resolve_log_level(&cli), Some(ResolvedLogLevel::Trace)); - } - - #[test] - fn test_log_level_multiple_most_verbose_wins() { - let cli = parse(&["tnmsc", "--warn", "--debug"]); - assert_eq!(resolve_log_level(&cli), Some(ResolvedLogLevel::Debug)); - } - - #[test] - fn test_no_log_level() { - let cli = parse(&["tnmsc"]); - assert_eq!(resolve_log_level(&cli), None); - } -} diff --git a/cli/src/commands/CleanCommand.ts b/cli/src/commands/CleanCommand.ts deleted file mode 100644 index bb8be0a8..00000000 --- a/cli/src/commands/CleanCommand.ts +++ /dev/null @@ -1,34 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {performCleanup} from './CleanupUtils' - -/** - * Clean command - deletes registered output files and directories - */ -export class CleanCommand implements Command { - readonly name = 'clean' - - async execute(ctx: CommandContext): Promise { - const {logger, outputPlugins, createCleanContext} = ctx - logger.info('running clean pipeline', {command: 'clean'}) - - const cleanCtx = createCleanContext(false) - const result = await performCleanup(outputPlugins, cleanCtx, logger) - - if (result.violations.length > 0 || result.conflicts.length > 0) { - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - ...result.message != null ? {message: result.message} : {} - } - } - - logger.info('clean complete', {deletedFiles: result.deletedFiles, deletedDirs: result.deletedDirs}) - - return { - success: true, - filesAffected: result.deletedFiles, - dirsAffected: result.deletedDirs - } - } -} diff --git a/cli/src/commands/CleanupUtils.adapter.test.ts b/cli/src/commands/CleanupUtils.adapter.test.ts deleted file mode 100644 index 069ea3ab..00000000 --- a/cli/src/commands/CleanupUtils.adapter.test.ts +++ /dev/null @@ -1,156 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it, vi} from 'vitest' -import {FilePathKind, PluginKind} from '../plugins/plugin-core' - -const nativeBindingMocks = vi.hoisted(() => ({ - planCleanup: vi.fn<(snapshotJson: string) => string>(), - performCleanup: vi.fn<(snapshotJson: string) => string>() -})) - -vi.mock('../core/native-binding', () => ({ - getNativeBinding: () => ({ - ...globalThis.__TNMSC_TEST_NATIVE_BINDING__, - planCleanup: nativeBindingMocks.planCleanup, - performCleanup: nativeBindingMocks.performCleanup - }) -})) - -const cleanupModulePromise = import('./CleanupUtils') - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createCleanContext(workspaceDir: string): OutputCleanContext { - return { - logger: createMockLogger(), - fs, - path, - glob, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [ - { - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceDir, 'project-a') - } - } - ] - }, - aindexDir: path.join(workspaceDir, 'aindex') - } - } as OutputCleanContext -} - -function createMockOutputPlugin(): OutputPlugin { - return { - type: PluginKind.Output, - name: 'MockOutputPlugin', - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return [{path: path.join('/tmp', 'project-a', 'AGENTS.md'), source: {}}] - }, - async declareCleanupPaths(): Promise { - return { - delete: [{kind: 'glob', path: path.join('/tmp', '.codex', 'skills', '*'), excludeBasenames: ['.system']}] - } - }, - async convertContent() { - return 'test' - } - } -} - -describe('cleanupUtils native adapter', () => { - it('uses the native cleanup bridge when it is available', async () => { - nativeBindingMocks.planCleanup.mockReset() - nativeBindingMocks.performCleanup.mockReset() - - nativeBindingMocks.planCleanup.mockReturnValue( - JSON.stringify({ - filesToDelete: ['/tmp/project-a/AGENTS.md'], - dirsToDelete: ['/tmp/.codex/skills/legacy'], - emptyDirsToDelete: ['/tmp/.codex/skills'], - violations: [], - conflicts: [], - excludedScanGlobs: ['**/.git/**'] - }) - ) - nativeBindingMocks.performCleanup.mockReturnValue( - JSON.stringify({ - deletedFiles: 1, - deletedDirs: 2, - errors: [], - violations: [], - conflicts: [], - filesToDelete: ['/tmp/project-a/AGENTS.md'], - dirsToDelete: ['/tmp/.codex/skills/legacy'], - emptyDirsToDelete: ['/tmp/.codex/skills'], - excludedScanGlobs: ['**/.git/**'] - }) - ) - - const {collectDeletionTargets, hasNativeCleanupBinding, performCleanup} = await cleanupModulePromise - const workspaceDir = path.resolve('tmp-native-cleanup-adapter') - const cleanCtx = createCleanContext(workspaceDir) - const plugin = createMockOutputPlugin() - - expect(hasNativeCleanupBinding()).toBe(true) - - const plan = await collectDeletionTargets([plugin], cleanCtx) - expect(plan).toEqual({ - filesToDelete: ['/tmp/project-a/AGENTS.md'], - dirsToDelete: ['/tmp/.codex/skills/legacy'], - emptyDirsToDelete: ['/tmp/.codex/skills'], - violations: [], - conflicts: [], - excludedScanGlobs: ['**/.git/**'] - }) - expect(nativeBindingMocks.planCleanup).toHaveBeenCalledOnce() - - const planSnapshot = JSON.parse(String(nativeBindingMocks.planCleanup.mock.calls[0]?.[0])) as { - readonly pluginSnapshots: readonly {pluginName: string, outputs: readonly string[], cleanup: {delete?: readonly {kind: string}[]}}[] - } - expect(planSnapshot.pluginSnapshots).toEqual([ - expect.objectContaining({ - pluginName: 'MockOutputPlugin', - outputs: ['/tmp/project-a/AGENTS.md'], - cleanup: expect.objectContaining({ - delete: [expect.objectContaining({kind: 'glob'})] - }) - }) - ]) - - const result = await performCleanup([plugin], cleanCtx, createMockLogger()) - expect(result).toEqual({ - deletedFiles: 1, - deletedDirs: 3, - errors: [], - violations: [], - conflicts: [] - }) - expect(nativeBindingMocks.performCleanup).toHaveBeenCalledOnce() - }) -}) diff --git a/cli/src/commands/CleanupUtils.test.ts b/cli/src/commands/CleanupUtils.test.ts deleted file mode 100644 index 9d4f9f62..00000000 --- a/cli/src/commands/CleanupUtils.test.ts +++ /dev/null @@ -1,782 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {FilePathKind, IDEKind, PluginKind} from '../plugins/plugin-core' -import {collectDeletionTargets, performCleanup} from './CleanupUtils' - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createRecordingLogger(): ILogger & {debugMessages: unknown[]} { - const debugMessages: unknown[] = [] - - return { - debugMessages, - trace: () => {}, - debug: message => { - debugMessages.push(message) - }, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger & {debugMessages: unknown[]} -} - -function createCleanContext( - overrides?: Partial, - pluginOptionsOverrides?: Parameters[0] -): OutputCleanContext { - const workspaceDir = path.resolve('tmp-cleanup-utils-workspace') - return { - logger: createMockLogger(), - fs, - path, - glob, - dryRun: true, - pluginOptions: mergeConfig(pluginOptionsOverrides ?? {}), - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - }, - ...overrides - } - } as OutputCleanContext -} - -function createMockOutputPlugin(name: string, outputs: readonly string[], cleanup?: OutputCleanupDeclarations): OutputPlugin { - return { - type: PluginKind.Output, - name, - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return outputs.map(output => ({path: output, source: {}})) - }, - async declareCleanupPaths() { - return cleanup ?? {} - }, - async convertContent() { - return '' - } - } -} - -describe('collectDeletionTargets', () => { - it('throws when an output path matches a protected input source file', async () => { - const editorSource = path.resolve('tmp-aindex/public/.editorconfig') - const ignoreSource = path.resolve('tmp-aindex/public/.cursorignore') - - const ctx = createCleanContext({ - editorConfigFiles: [ - { - type: IDEKind.EditorConfig, - content: 'root = true', - length: 11, - filePathKind: FilePathKind.Absolute, - dir: { - pathKind: FilePathKind.Absolute, - path: editorSource, - getDirectoryName: () => '.editorconfig' - } - } - ], - aiAgentIgnoreConfigFiles: [ - { - fileName: '.cursorignore', - content: 'node_modules', - sourcePath: ignoreSource - } - ] - }) - - const plugin = createMockOutputPlugin('MockOutputPlugin', [editorSource, ignoreSource]) - - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') - }) - - it('keeps non-overlapping output paths for cleanup', async () => { - const outputA = path.resolve('tmp-out/a.md') - const outputB = path.resolve('tmp-out/b.md') - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [outputA, outputB]) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(new Set(result.filesToDelete)).toEqual(new Set([outputA, outputB])) - expect(result.violations).toEqual([]) - }) - - it('throws when an output path matches a known aindex protected config file', async () => { - const aindexDir = path.resolve('tmp-aindex') - const editorConfigOutput = path.resolve(aindexDir, 'public', '.editorconfig') - const ctx = createCleanContext({aindexDir}) - const plugin = createMockOutputPlugin('MockOutputPlugin', [editorConfigOutput]) - - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') - }) - - it('compacts nested delete targets to reduce IO', async () => { - const claudeBaseDir = path.resolve('tmp-out/.claude') - const ruleDir = path.join(claudeBaseDir, 'rules') - const ruleFile = path.join(ruleDir, 'a.md') - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [ruleFile], { - delete: [ - {kind: 'directory', path: claudeBaseDir}, - {kind: 'directory', path: ruleDir}, - {kind: 'file', path: ruleFile} - ] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([claudeBaseDir]) - expect(result.filesToDelete).toEqual([]) - }) - - it('skips parent deletion when a protected child path exists', async () => { - const codexBaseDir = path.resolve('tmp-out/.codex') - const promptsDir = path.join(codexBaseDir, 'prompts') - const protectedSystemDir = path.join(codexBaseDir, 'skills', '.system') - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [ - {kind: 'directory', path: codexBaseDir}, - {kind: 'directory', path: promptsDir} - ], - protect: [{kind: 'directory', path: protectedSystemDir}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([promptsDir]) - expect(result.violations.map(violation => violation.targetPath)).toEqual([codexBaseDir]) - }) - - it('blocks deleting dangerous roots and returns the most specific matching rule', async () => { - const homeDir = os.homedir() - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [{kind: 'directory', path: homeDir}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([]) - expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([ - expect.objectContaining({ - targetPath: path.resolve(homeDir), - protectedPath: path.resolve('tmp-cleanup-utils-workspace', 'knowladge'), - protectionMode: 'direct' - }) - ]) - }) - - it('throws when an output path matches a built-in protected path before directory guards run', async () => { - const workspaceDir = path.resolve('tmp-workspace-root') - const projectRoot = path.join(workspaceDir, 'project-a') - const aindexDir = path.join(workspaceDir, 'aindex') - const globalAindexDir = path.join(os.homedir(), '.aindex') - const globalConfigPath = path.join(globalAindexDir, '.tnmsc.json') - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [ - { - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => projectRoot - } - } - ] - }, - aindexDir - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [globalConfigPath], { - delete: [ - {kind: 'directory', path: globalAindexDir}, - {kind: 'directory', path: workspaceDir}, - {kind: 'directory', path: projectRoot}, - {kind: 'directory', path: aindexDir} - ] - }) - - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow( - `Cleanup protection conflict: 1 output path(s) are also protected: ${path.resolve(globalConfigPath)}` - ) - }) - - it('allows deleting non-mdx files under dist while blocking reserved dist mdx files', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-dist-mdx-')) - const workspaceDir = path.join(tempDir, 'workspace') - const distCommandDir = path.join(workspaceDir, 'aindex', 'dist', 'commands') - const projectChildFile = path.join(workspaceDir, 'project-a', 'AGENTS.md') - const protectedDistMdxFile = path.join(distCommandDir, 'demo.mdx') - const safeDistMarkdownFile = path.join(distCommandDir, 'README.md') - const globalChildDir = path.join(os.homedir(), '.aindex', '.codex', 'prompts') - const aindexSourceDir = path.join(workspaceDir, 'aindex', 'commands') - - fs.mkdirSync(path.dirname(projectChildFile), {recursive: true}) - fs.mkdirSync(distCommandDir, {recursive: true}) - fs.mkdirSync(aindexSourceDir, {recursive: true}) - fs.writeFileSync(projectChildFile, '# agent', 'utf8') - fs.writeFileSync(protectedDistMdxFile, '# compiled', 'utf8') - fs.writeFileSync(safeDistMarkdownFile, '# doc', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [ - { - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceDir, 'project-a') - } - } - ] - }, - aindexDir: path.join(workspaceDir, 'aindex') - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [projectChildFile, safeDistMarkdownFile], { - delete: [ - {kind: 'file', path: protectedDistMdxFile}, - {kind: 'directory', path: globalChildDir}, - {kind: 'directory', path: aindexSourceDir} - ] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(new Set(result.filesToDelete)).toEqual(new Set([path.resolve(projectChildFile), path.resolve(safeDistMarkdownFile)])) - const allDirsToDelete = [...result.dirsToDelete, ...result.emptyDirsToDelete] - expect(new Set(allDirsToDelete)).toEqual(new Set([path.resolve(globalChildDir), path.resolve(aindexSourceDir), path.resolve(workspaceDir, 'project-a')])) - expect(result.violations).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - targetPath: path.resolve(protectedDistMdxFile), - protectionMode: 'direct', - protectedPath: path.resolve(protectedDistMdxFile) - }), - expect.objectContaining({targetPath: path.resolve(aindexSourceDir)}) - ]) - ) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('blocks deleting a dist directory when protected mdx descendants exist', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-dist-dir-')) - const workspaceDir = path.join(tempDir, 'workspace') - const distCommandDir = path.join(workspaceDir, 'aindex', 'dist', 'commands') - const protectedDistMdxFile = path.join(distCommandDir, 'demo.mdx') - - fs.mkdirSync(distCommandDir, {recursive: true}) - fs.writeFileSync(protectedDistMdxFile, '# compiled', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - }, - aindexDir: path.join(workspaceDir, 'aindex') - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [{kind: 'directory', path: distCommandDir}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([]) - expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([ - expect.objectContaining({ - targetPath: path.resolve(distCommandDir), - protectionMode: 'direct', - protectedPath: path.resolve(protectedDistMdxFile) - }) - ]) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('allows deleting non-mdx files under app while blocking reserved app mdx files', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-app-mdx-')) - const workspaceDir = path.join(tempDir, 'workspace') - const appDir = path.join(workspaceDir, 'aindex', 'app') - const protectedAppMdxFile = path.join(appDir, 'guide.mdx') - const safeAppMarkdownFile = path.join(appDir, 'README.md') - - fs.mkdirSync(appDir, {recursive: true}) - fs.writeFileSync(protectedAppMdxFile, '# app guide', 'utf8') - fs.writeFileSync(safeAppMarkdownFile, '# readme', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - }, - aindexDir: path.join(workspaceDir, 'aindex') - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [safeAppMarkdownFile], { - delete: [{kind: 'file', path: protectedAppMdxFile}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - targetPath: path.resolve(protectedAppMdxFile), - protectionMode: 'direct', - protectedPath: path.resolve(protectedAppMdxFile) - }), - expect.objectContaining({targetPath: path.resolve(safeAppMarkdownFile)}) - ]) - ) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('throws when an output file path exactly matches a cleanup protect declaration', async () => { - const outputPath = path.resolve('tmp-out/protected.md') - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [outputPath], { - protect: [{kind: 'file', path: outputPath}] - }) - - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') - }) - - it('blocks deleting an app directory when protected mdx descendants exist', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-app-dir-')) - const workspaceDir = path.join(tempDir, 'workspace') - const appSubDir = path.join(workspaceDir, 'aindex', 'app', 'nested') - const protectedAppMdxFile = path.join(appSubDir, 'guide.mdx') - - fs.mkdirSync(appSubDir, {recursive: true}) - fs.writeFileSync(protectedAppMdxFile, '# app guide', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - }, - aindexDir: path.join(workspaceDir, 'aindex') - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [{kind: 'directory', path: path.join(workspaceDir, 'aindex', 'app')}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([]) - expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([ - expect.objectContaining({ - targetPath: path.resolve(path.join(workspaceDir, 'aindex', 'app')), - protectionMode: 'direct', - protectedPath: path.resolve(protectedAppMdxFile) - }) - ]) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('blocks symlink targets that resolve to a protected path and keeps the most specific match', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-guard-')) - const workspaceDir = path.join(tempDir, 'workspace') - const symlinkPath = path.join(tempDir, 'workspace-link') - - fs.mkdirSync(workspaceDir, {recursive: true}) - - try { - const symlinkType: 'junction' | 'dir' = process.platform === 'win32' ? 'junction' : 'dir' - fs.symlinkSync(workspaceDir, symlinkPath, symlinkType) - - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [{kind: 'directory', path: symlinkPath}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([]) - expect(result.violations).toEqual([ - expect.objectContaining({ - targetPath: path.resolve(symlinkPath), - protectedPath: path.resolve(path.join(workspaceDir, 'knowladge')), - protectionMode: 'direct' - }) - ]) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('lets direct protect declarations keep descendants deletable while recursive protect declarations block them', async () => { - const workspaceDir = path.resolve('tmp-direct-vs-recursive') - const directProtectedDir = path.join(workspaceDir, 'project-a') - const recursiveProtectedDir = path.join(workspaceDir, 'aindex', 'dist') - const directChildFile = path.join(directProtectedDir, 'AGENTS.md') - const recursiveChildFile = path.join(recursiveProtectedDir, 'commands', 'demo.mdx') - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [directChildFile, recursiveChildFile], { - protect: [ - {kind: 'directory', path: directProtectedDir, protectionMode: 'direct'}, - {kind: 'directory', path: recursiveProtectedDir, protectionMode: 'recursive'} - ] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.filesToDelete).toEqual([path.resolve(directChildFile)]) - expect(result.violations).toEqual([ - expect.objectContaining({ - targetPath: path.resolve(recursiveChildFile), - protectionMode: 'recursive', - protectedPath: path.resolve(recursiveProtectedDir) - }) - ]) - }) - - it('skips delete glob matches covered by excludeScanGlobs while still deleting other sibling directories', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-exclude-glob-')) - const skillsDir = path.join(tempDir, '.cursor', 'skills-cursor') - const preservedDir = path.join(skillsDir, 'create-rule') - const staleDir = path.join(skillsDir, 'legacy-skill') - - fs.mkdirSync(preservedDir, {recursive: true}) - fs.mkdirSync(staleDir, {recursive: true}) - fs.writeFileSync(path.join(preservedDir, 'SKILL.md'), '# preserved', 'utf8') - fs.writeFileSync(path.join(staleDir, 'SKILL.md'), '# stale', 'utf8') - - try { - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [{kind: 'glob', path: path.join(skillsDir, '*')}], - protect: [{kind: 'directory', path: preservedDir}], - excludeScanGlobs: [preservedDir, path.join(preservedDir, '**')] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([path.resolve(staleDir)]) - expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([]) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('throws when an output path matches the configured workspace prompt source file', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-workspace-src-')) - const workspaceDir = path.join(tempDir, 'workspace') - const aindexDir = path.join(workspaceDir, 'aindex-meta') - const workspacePromptSource = path.join(aindexDir, 'meta', 'workspace.src.mdx') - - fs.mkdirSync(path.dirname(workspacePromptSource), {recursive: true}) - fs.writeFileSync(workspacePromptSource, '# workspace', 'utf8') - - try { - const ctx = createCleanContext( - { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - }, - aindexDir - }, - { - workspaceDir, - aindex: { - dir: 'aindex-meta', - workspacePrompt: { - src: 'meta/workspace.src.mdx', - dist: 'compiled/workspace.mdx' - } - } - } as Parameters[0] - ) - const plugin = createMockOutputPlugin('MockOutputPlugin', [workspacePromptSource]) - - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('plans workspace empty directories while skipping excluded trees and symlink entries', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-empty-sweep-')) - const workspaceDir = path.join(tempDir, 'workspace') - const sourceLeafDir = path.join(workspaceDir, 'source', 'empty', 'leaf') - const sourceKeepFile = path.join(workspaceDir, 'source', 'keep.md') - const distEmptyDir = path.join(workspaceDir, 'dist', 'ghost') - const nodeModulesEmptyDir = path.join(workspaceDir, 'node_modules', 'pkg', 'ghost') - const gitEmptyDir = path.join(workspaceDir, '.git', 'objects', 'info') - const symlinkTarget = path.join(tempDir, 'symlink-target') - const symlinkParentDir = path.join(workspaceDir, 'symlink-parent') - const symlinkPath = path.join(symlinkParentDir, 'linked') - - fs.mkdirSync(sourceLeafDir, {recursive: true}) - fs.mkdirSync(path.dirname(sourceKeepFile), {recursive: true}) - fs.mkdirSync(distEmptyDir, {recursive: true}) - fs.mkdirSync(nodeModulesEmptyDir, {recursive: true}) - fs.mkdirSync(gitEmptyDir, {recursive: true}) - fs.mkdirSync(symlinkTarget, {recursive: true}) - fs.mkdirSync(symlinkParentDir, {recursive: true}) - fs.writeFileSync(sourceKeepFile, '# keep', 'utf8') - - try { - const symlinkType: 'junction' | 'dir' = process.platform === 'win32' ? 'junction' : 'dir' - fs.symlinkSync(symlinkTarget, symlinkPath, symlinkType) - - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', []) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.filesToDelete).toEqual([]) - expect(result.dirsToDelete).toEqual([]) - expect(result.emptyDirsToDelete).toEqual([path.resolve(workspaceDir, 'source', 'empty'), path.resolve(sourceLeafDir)]) - expect(result.emptyDirsToDelete).not.toContain(path.resolve(workspaceDir)) - expect(result.emptyDirsToDelete).not.toContain(path.resolve(distEmptyDir)) - expect(result.emptyDirsToDelete).not.toContain(path.resolve(nodeModulesEmptyDir)) - expect(result.emptyDirsToDelete).not.toContain(path.resolve(gitEmptyDir)) - expect(result.emptyDirsToDelete).not.toContain(path.resolve(symlinkParentDir)) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) -}) - -describe('performCleanup', () => { - it('deletes files and directories in one cleanup pass', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-perform-cleanup-')) - const outputFile = path.join(tempDir, 'project-a', 'AGENTS.md') - const outputDir = path.join(tempDir, '.codex', 'prompts') - const stalePrompt = path.join(outputDir, 'demo.md') - - fs.mkdirSync(path.dirname(outputFile), {recursive: true}) - fs.mkdirSync(outputDir, {recursive: true}) - fs.writeFileSync(outputFile, '# agent', 'utf8') - fs.writeFileSync(stalePrompt, '# prompt', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: tempDir, - getDirectoryName: () => path.basename(tempDir), - getAbsolutePath: () => tempDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [outputFile], { - delete: [{kind: 'directory', path: outputDir}] - }) - - const result = await performCleanup([plugin], ctx, createMockLogger()) - - expect(result).toEqual( - expect.objectContaining({ - deletedFiles: 1, - deletedDirs: 3, - errors: [], - violations: [], - conflicts: [] - }) - ) - expect(fs.existsSync(outputFile)).toBe(false) - expect(fs.existsSync(outputDir)).toBe(false) - expect(fs.existsSync(path.dirname(outputFile))).toBe(false) - expect(fs.existsSync(path.dirname(outputDir))).toBe(false) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('logs aggregated cleanup execution summaries instead of per-path success logs', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-perform-cleanup-logging-')) - const outputFile = path.join(tempDir, 'project-a', 'AGENTS.md') - const outputDir = path.join(tempDir, '.codex', 'prompts') - const stalePrompt = path.join(outputDir, 'demo.md') - const logger = createRecordingLogger() - - fs.mkdirSync(path.dirname(outputFile), {recursive: true}) - fs.mkdirSync(outputDir, {recursive: true}) - fs.writeFileSync(outputFile, '# agent', 'utf8') - fs.writeFileSync(stalePrompt, '# prompt', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: tempDir, - getDirectoryName: () => path.basename(tempDir), - getAbsolutePath: () => tempDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [outputFile], { - delete: [{kind: 'directory', path: outputDir}] - }) - - await performCleanup([plugin], ctx, logger) - - expect(logger.debugMessages).toEqual( - expect.arrayContaining(['cleanup plan built', 'cleanup delete execution started', 'cleanup delete execution complete']) - ) - expect(logger.debugMessages).not.toContainEqual(expect.objectContaining({path: outputFile})) - expect(logger.debugMessages).not.toContainEqual(expect.objectContaining({path: outputDir})) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('deletes generated files and then prunes workspace empty directories', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-perform-cleanup-empty-sweep-')) - const outputFile = path.join(tempDir, 'generated', 'AGENTS.md') - const emptyLeafDir = path.join(tempDir, 'scratch', 'empty', 'leaf') - const retainedScratchFile = path.join(tempDir, 'scratch', 'keep.md') - - fs.mkdirSync(path.dirname(outputFile), {recursive: true}) - fs.mkdirSync(emptyLeafDir, {recursive: true}) - fs.mkdirSync(path.dirname(retainedScratchFile), {recursive: true}) - fs.writeFileSync(outputFile, '# agent', 'utf8') - fs.writeFileSync(retainedScratchFile, '# keep', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: tempDir, - getDirectoryName: () => path.basename(tempDir), - getAbsolutePath: () => tempDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [outputFile]) - - const result = await performCleanup([plugin], ctx, createMockLogger()) - - expect(result).toEqual( - expect.objectContaining({ - deletedFiles: 1, - deletedDirs: 3, - errors: [], - violations: [], - conflicts: [] - }) - ) - expect(fs.existsSync(outputFile)).toBe(false) - expect(fs.existsSync(path.dirname(outputFile))).toBe(false) - expect(fs.existsSync(path.join(tempDir, 'scratch', 'empty', 'leaf'))).toBe(false) - expect(fs.existsSync(path.join(tempDir, 'scratch', 'empty'))).toBe(false) - expect(fs.existsSync(path.join(tempDir, 'scratch'))).toBe(true) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/commands/CleanupUtils.ts b/cli/src/commands/CleanupUtils.ts deleted file mode 100644 index 9c80a82c..00000000 --- a/cli/src/commands/CleanupUtils.ts +++ /dev/null @@ -1,462 +0,0 @@ -import type { - ILogger, - OutputCleanContext, - OutputCleanupDeclarations, - OutputCleanupPathDeclaration, - OutputFileDeclaration, - OutputPlugin, - PluginOptions -} from '../plugins/plugin-core' -import type {ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard' -import {buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines} from '@/diagnostics' -import {loadAindexProjectConfig} from '../aindex-config/AindexProjectConfigLoader' -import {getNativeBinding} from '../core/native-binding' -import {collectAllPluginOutputs} from '../plugins/plugin-core' -import { - collectConfiguredAindexInputRules, - collectProjectRoots, - collectProtectedInputSourceRules, - logProtectedDeletionGuardError -} from '../ProtectedDeletionGuard' - -let nativeCleanupBindingCheck: boolean | null = null - -export interface CleanupResult { - readonly deletedFiles: number - readonly deletedDirs: number - readonly errors: readonly CleanupError[] - readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[] - readonly conflicts: readonly CleanupProtectionConflict[] - readonly message?: string -} - -export interface CleanupError { - readonly path: string - readonly type: 'file' | 'directory' - readonly error: unknown -} - -export interface CleanupProtectionConflict { - readonly outputPath: string - readonly outputPlugin: string - readonly protectedPath: string - readonly protectionMode: ProtectionMode - readonly protectedBy: string - readonly reason: string -} - -export class CleanupProtectionConflictError extends Error { - readonly conflicts: readonly CleanupProtectionConflict[] - - constructor(conflicts: readonly CleanupProtectionConflict[]) { - super(buildCleanupProtectionConflictMessage(conflicts)) - this.name = 'CleanupProtectionConflictError' - this.conflicts = conflicts - } -} - -interface NativeCleanupBinding { - readonly planCleanup?: (snapshotJson: string) => string | Promise - readonly performCleanup?: (snapshotJson: string) => string | Promise -} - -type NativeProtectionMode = 'direct' | 'recursive' -type NativeProtectionRuleMatcher = 'path' | 'glob' -type NativeCleanupTargetKind = 'file' | 'directory' | 'glob' -type NativeCleanupErrorKind = 'file' | 'directory' - -interface NativeCleanupTarget { - readonly path: string - readonly kind: NativeCleanupTargetKind - readonly excludeBasenames?: readonly string[] - readonly protectionMode?: NativeProtectionMode - readonly scope?: string - readonly label?: string -} - -interface NativeCleanupDeclarations { - readonly delete?: readonly NativeCleanupTarget[] - readonly protect?: readonly NativeCleanupTarget[] - readonly excludeScanGlobs?: readonly string[] -} - -interface NativePluginCleanupSnapshot { - readonly pluginName: string - readonly outputs: readonly string[] - readonly cleanup: NativeCleanupDeclarations -} - -interface NativeProtectedRule { - readonly path: string - readonly protectionMode: NativeProtectionMode - readonly reason: string - readonly source: string - readonly matcher?: NativeProtectionRuleMatcher | undefined -} - -interface NativeCleanupSnapshot { - readonly workspaceDir: string - readonly aindexDir?: string - readonly projectRoots: readonly string[] - readonly protectedRules: readonly NativeProtectedRule[] - readonly pluginSnapshots: readonly NativePluginCleanupSnapshot[] - readonly emptyDirExcludeGlobs?: readonly string[] -} - -interface NativeProtectedPathViolation { - readonly targetPath: string - readonly protectedPath: string - readonly protectionMode: NativeProtectionMode - readonly reason: string - readonly source: string -} - -interface NativeCleanupProtectionConflict { - readonly outputPath: string - readonly outputPlugin: string - readonly protectedPath: string - readonly protectionMode: NativeProtectionMode - readonly protectedBy: string - readonly reason: string -} - -interface NativeCleanupPlan { - readonly filesToDelete: string[] - readonly dirsToDelete: string[] - readonly emptyDirsToDelete: string[] - readonly violations: readonly NativeProtectedPathViolation[] - readonly conflicts: readonly NativeCleanupProtectionConflict[] - readonly excludedScanGlobs: string[] -} - -interface NativeCleanupError { - readonly path: string - readonly kind: NativeCleanupErrorKind - readonly error: string -} - -interface NativeCleanupResult { - readonly deletedFiles: number - readonly deletedDirs: number - readonly errors: readonly NativeCleanupError[] - readonly violations: readonly NativeProtectedPathViolation[] - readonly conflicts: readonly NativeCleanupProtectionConflict[] - readonly filesToDelete: string[] - readonly dirsToDelete: string[] - readonly emptyDirsToDelete: string[] - readonly excludedScanGlobs: string[] -} - -export function hasNativeCleanupBinding(): boolean { - if (nativeCleanupBindingCheck !== null) { - return nativeCleanupBindingCheck - } - const nativeBinding = getNativeBinding() - nativeCleanupBindingCheck = nativeBinding?.planCleanup != null && nativeBinding.performCleanup != null - return nativeCleanupBindingCheck -} - -function requireNativeCleanupBinding(): NativeCleanupBinding { - const nativeBinding = getNativeBinding() - if (nativeBinding == null) { - throw new Error('Native cleanup binding is required. Build or install the Rust NAPI package before running tnmsc.') - } - return nativeBinding -} - -function mapProtectionMode(mode: ProtectionMode): NativeProtectionMode { - return mode -} - -function mapProtectionRuleMatcher(matcher: ProtectionRuleMatcher | undefined): NativeProtectionRuleMatcher | undefined { - return matcher -} - -function mapCleanupTarget(target: OutputCleanupPathDeclaration): NativeCleanupTarget { - return { - path: target.path, - kind: target.kind, - ...target.excludeBasenames != null && target.excludeBasenames.length > 0 ? {excludeBasenames: [...target.excludeBasenames]} : {}, - ...target.protectionMode != null ? {protectionMode: mapProtectionMode(target.protectionMode)} : {}, - ...target.scope != null ? {scope: target.scope} : {}, - ...target.label != null ? {label: target.label} : {} - } -} - -async function collectPluginCleanupDeclarations(plugin: OutputPlugin, cleanCtx: OutputCleanContext): Promise { - if (plugin.declareCleanupPaths == null) return {} - return plugin.declareCleanupPaths({...cleanCtx, dryRun: true}) -} - -async function collectPluginCleanupSnapshot( - plugin: OutputPlugin, - cleanCtx: OutputCleanContext, - predeclaredOutputs?: ReadonlyMap -): Promise { - const existingOutputDeclarations = predeclaredOutputs?.get(plugin) - const [outputs, cleanup] = await Promise.all([ - existingOutputDeclarations != null ? Promise.resolve(existingOutputDeclarations) : plugin.declareOutputFiles({...cleanCtx, dryRun: true}), - collectPluginCleanupDeclarations(plugin, cleanCtx) - ]) - - return { - pluginName: plugin.name, - outputs: outputs.map(output => output.path), - cleanup: { - ...cleanup.delete != null && cleanup.delete.length > 0 ? {delete: cleanup.delete.map(mapCleanupTarget)} : {}, - ...cleanup.protect != null && cleanup.protect.length > 0 ? {protect: cleanup.protect.map(mapCleanupTarget)} : {}, - ...cleanup.excludeScanGlobs != null && cleanup.excludeScanGlobs.length > 0 ? {excludeScanGlobs: [...cleanup.excludeScanGlobs]} : {} - } - } -} - -function collectConfiguredCleanupProtectionRules(cleanCtx: OutputCleanContext): NativeProtectedRule[] { - return (cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []).map(rule => ({ - path: rule.path, - protectionMode: mapProtectionMode(rule.protectionMode), - reason: rule.reason ?? 'configured cleanup protection rule', - source: 'configured-cleanup-protection', - matcher: mapProtectionRuleMatcher(rule.matcher ?? 'path') - })) -} - -function buildCleanupProtectionConflictMessage(conflicts: readonly NativeCleanupProtectionConflict[]): string { - const pathList = conflicts.map(conflict => conflict.outputPath).join(', ') - return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}` -} - -function logCleanupProtectionConflicts(logger: ILogger, conflicts: readonly NativeCleanupProtectionConflict[]): void { - const firstConflict = conflicts[0] - - logger.error( - buildDiagnostic({ - code: 'CLEANUP_PROTECTION_CONFLICT_DETECTED', - title: 'Cleanup output paths conflict with protected inputs', - rootCause: diagnosticLines( - `tnmsc found ${conflicts.length} output path(s) that also match protected cleanup rules.`, - firstConflict == null - ? 'No conflict details were captured.' - : `Example conflict: "${firstConflict.outputPath}" is protected by "${firstConflict.protectedPath}".` - ), - exactFix: diagnosticLines('Separate generated output paths from protected source or reserved workspace paths before running cleanup again.'), - possibleFixes: [ - diagnosticLines('Update cleanup protect declarations so they do not overlap generated outputs.'), - diagnosticLines('Move the conflicting output target to a generated-only directory.') - ], - details: { - count: conflicts.length, - conflicts - } - }) - ) -} - -function logCleanupPlanDiagnostics( - logger: ILogger, - plan: Pick< - NativeCleanupPlan | NativeCleanupResult, - 'filesToDelete' | 'dirsToDelete' | 'emptyDirsToDelete' | 'violations' | 'conflicts' | 'excludedScanGlobs' - > -): void { - logger.debug('cleanup plan built', { - filesToDelete: plan.filesToDelete.length, - dirsToDelete: plan.dirsToDelete.length + plan.emptyDirsToDelete.length, - emptyDirsToDelete: plan.emptyDirsToDelete.length, - violations: plan.violations.length, - conflicts: plan.conflicts.length, - excludedScanGlobs: plan.excludedScanGlobs - }) -} - -function logNativeCleanupErrors( - logger: ILogger, - errors: readonly NativeCleanupError[] -): readonly {path: string, type: 'file' | 'directory', error: string}[] { - return errors.map(currentError => { - const type = currentError.kind === 'directory' ? 'directory' : 'file' - logger.warn( - buildFileOperationDiagnostic({ - code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED', - title: type === 'file' ? 'Cleanup could not delete a file' : 'Cleanup could not delete a directory', - operation: 'delete', - targetKind: type, - path: currentError.path, - error: currentError.error, - details: { - phase: 'cleanup' - } - }) - ) - - return {path: currentError.path, type, error: currentError.error} - }) -} - -async function buildCleanupSnapshot( - outputPlugins: readonly OutputPlugin[], - cleanCtx: OutputCleanContext, - predeclaredOutputs?: ReadonlyMap -): Promise { - const pluginSnapshots = await Promise.all(outputPlugins.map(async plugin => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs))) - - const protectedRules: NativeProtectedRule[] = [] - for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) { - protectedRules.push({ - path: rule.path, - protectionMode: mapProtectionMode(rule.protectionMode), - reason: rule.reason, - source: rule.source, - ...rule.matcher != null ? {matcher: mapProtectionRuleMatcher(rule.matcher)} : {} - }) - } - - if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) { - for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, { - workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path - })) { - protectedRules.push({ - path: rule.path, - protectionMode: mapProtectionMode(rule.protectionMode), - reason: rule.reason, - source: rule.source, - ...rule.matcher != null ? {matcher: mapProtectionRuleMatcher(rule.matcher)} : {} - }) - } - } - - protectedRules.push(...collectConfiguredCleanupProtectionRules(cleanCtx)) - - // Load aindex project config (aindex.config.ts) for empty-dir exclude globs - let emptyDirExcludeGlobs: string[] | undefined - if (cleanCtx.collectedOutputContext.aindexDir != null) { - const aindexConfig = await loadAindexProjectConfig(cleanCtx.collectedOutputContext.aindexDir) - if (aindexConfig.found) { - const exclude = aindexConfig.config.emptyDirCleanup?.exclude - if (exclude != null && exclude.length > 0) { - emptyDirExcludeGlobs = [...exclude] - } - } - } - - return { - workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, - ...cleanCtx.collectedOutputContext.aindexDir != null ? {aindexDir: cleanCtx.collectedOutputContext.aindexDir} : {}, - projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext), - protectedRules, - pluginSnapshots, - ...emptyDirExcludeGlobs != null && emptyDirExcludeGlobs.length > 0 ? {emptyDirExcludeGlobs} : {} - } -} - -function parseNativeJson(json: string): T { - return JSON.parse(json) as T -} - -export async function planCleanupWithNative(snapshot: NativeCleanupSnapshot): Promise { - const nativeBinding = requireNativeCleanupBinding() - if (nativeBinding?.planCleanup == null) throw new Error('Native cleanup planning is unavailable') - const result = await Promise.resolve(nativeBinding.planCleanup(JSON.stringify(snapshot))) - return parseNativeJson(result) -} - -export async function performCleanupWithNative(snapshot: NativeCleanupSnapshot): Promise { - const nativeBinding = requireNativeCleanupBinding() - if (nativeBinding?.performCleanup == null) throw new Error('Native cleanup execution is unavailable') - const result = await Promise.resolve(nativeBinding.performCleanup(JSON.stringify(snapshot))) - return parseNativeJson(result) -} - -export async function collectDeletionTargets( - outputPlugins: readonly OutputPlugin[], - cleanCtx: OutputCleanContext, - predeclaredOutputs?: ReadonlyMap -): Promise<{ - filesToDelete: string[] - dirsToDelete: string[] - emptyDirsToDelete: string[] - violations: import('../ProtectedDeletionGuard').ProtectedPathViolation[] - conflicts: CleanupProtectionConflict[] - excludedScanGlobs: string[] -}> { - const snapshot = await buildCleanupSnapshot(outputPlugins, cleanCtx, predeclaredOutputs) - const plan = await planCleanupWithNative(snapshot) - - if (plan.conflicts.length > 0) { - throw new CleanupProtectionConflictError(plan.conflicts) - } - - return { - filesToDelete: plan.filesToDelete, - dirsToDelete: plan.dirsToDelete.sort((a, b) => a.localeCompare(b)), - emptyDirsToDelete: plan.emptyDirsToDelete.sort((a, b) => a.localeCompare(b)), - violations: [...plan.violations], - conflicts: [], - excludedScanGlobs: plan.excludedScanGlobs - } -} - -export async function performCleanup( - outputPlugins: readonly OutputPlugin[], - cleanCtx: OutputCleanContext, - logger: ILogger, - predeclaredOutputs?: ReadonlyMap -): Promise { - if (predeclaredOutputs != null) { - const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx, predeclaredOutputs) - logger.debug('Collected outputs for cleanup', { - projectDirs: outputs.projectDirs.length, - projectFiles: outputs.projectFiles.length, - globalDirs: outputs.globalDirs.length, - globalFiles: outputs.globalFiles.length - }) - } - - const snapshot = await buildCleanupSnapshot(outputPlugins, cleanCtx, predeclaredOutputs) - const result = await performCleanupWithNative(snapshot) - - logCleanupPlanDiagnostics(logger, result) - - if (result.conflicts.length > 0) { - logCleanupProtectionConflicts(logger, result.conflicts) - return { - deletedFiles: 0, - deletedDirs: 0, - errors: [], - violations: [], - conflicts: result.conflicts, - message: buildCleanupProtectionConflictMessage(result.conflicts) - } - } - - if (result.violations.length > 0) { - logProtectedDeletionGuardError(logger, 'cleanup', result.violations) - return { - deletedFiles: 0, - deletedDirs: 0, - errors: [], - violations: result.violations, - conflicts: [], - message: `Protected deletion guard blocked cleanup for ${result.violations.length} path(s)` - } - } - - logger.debug('cleanup delete execution started', { - filesToDelete: result.filesToDelete.length, - dirsToDelete: result.dirsToDelete.length + result.emptyDirsToDelete.length, - emptyDirsToDelete: result.emptyDirsToDelete.length - }) - const loggedErrors = logNativeCleanupErrors(logger, result.errors) - logger.debug('cleanup delete execution complete', { - deletedFiles: result.deletedFiles, - deletedDirs: result.deletedDirs, - errors: loggedErrors.length - }) - - return { - deletedFiles: result.deletedFiles, - deletedDirs: result.deletedDirs + result.emptyDirsToDelete.length, - errors: loggedErrors, - violations: [], - conflicts: [] - } -} diff --git a/cli/src/commands/Command.ts b/cli/src/commands/Command.ts deleted file mode 100644 index 7f83bc06..00000000 --- a/cli/src/commands/Command.ts +++ /dev/null @@ -1,95 +0,0 @@ -import type {ILogger, LoggerDiagnosticRecord} from '@truenine/logger' -import type { - OutputCleanContext, - OutputCollectedContext, - OutputPlugin, - OutputWriteContext, - PluginOptions, - UserConfigFile -} from '../plugins/plugin-core' - -/** - * Command execution context - */ -export interface CommandContext { - readonly logger: ILogger - readonly outputPlugins: readonly OutputPlugin[] - readonly collectedOutputContext: OutputCollectedContext - readonly userConfigOptions: Required - readonly createCleanContext: (dryRun: boolean) => OutputCleanContext - readonly createWriteContext: (dryRun: boolean) => OutputWriteContext -} - -/** - * Command execution result - */ -export interface CommandResult { - readonly success: boolean - readonly filesAffected: number - readonly dirsAffected: number - readonly message?: string -} - -/** - * Per-plugin execution result for JSON output mode. - * Captures individual plugin execution status, timing, and error details. - */ -export interface PluginExecutionResult { - readonly pluginName: string - readonly kind: 'Input' | 'Output' - readonly status: 'success' | 'failed' | 'skipped' - readonly filesWritten?: number - readonly error?: string - readonly duration?: number -} - -/** - * Structured JSON output for command execution results. - * Extends CommandResult with per-plugin details and error aggregation - * for consumption by Tauri sidecar / external tooling. - */ -export interface JsonCommandResult { - readonly success: boolean - readonly filesAffected: number - readonly dirsAffected: number - readonly message?: string - readonly pluginResults: readonly PluginExecutionResult[] - readonly warnings: readonly LoggerDiagnosticRecord[] - readonly errors: readonly LoggerDiagnosticRecord[] -} - -/** - * JSON output for configuration information. - * Contains the merged config and the source layers that contributed to it. - */ -export interface JsonConfigInfo { - readonly merged: UserConfigFile - readonly sources: readonly ConfigSource[] -} - -/** - * Describes a single configuration source layer. - */ -export interface ConfigSource { - readonly path: string - readonly layer: 'programmatic' | 'global' | 'default' - readonly config: Partial -} - -/** - * JSON output for plugin information listing. - */ -export interface JsonPluginInfo { - readonly name: string - readonly kind: 'Input' | 'Output' - readonly description: string - readonly dependencies: readonly string[] -} - -/** - * Base command interface - */ -export interface Command { - readonly name: string - execute: (ctx: CommandContext) => Promise -} diff --git a/cli/src/commands/CommandFactory.ts b/cli/src/commands/CommandFactory.ts deleted file mode 100644 index 3604485f..00000000 --- a/cli/src/commands/CommandFactory.ts +++ /dev/null @@ -1,29 +0,0 @@ -import type {Command} from './Command' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' - -/** - * Command factory interface - * Each factory knows how to create a specific command based on CLI args - */ -export interface CommandFactory { - canHandle: (args: ParsedCliArgs) => boolean - - createCommand: (args: ParsedCliArgs) => Command -} - -/** - * Priority levels for command factory resolution - * Lower number = higher priority - */ -export enum FactoryPriority { - Flags = 0, // --version, --help flags (highest priority) - Unknown = 1, // Unknown command handling - Subcommand = 2 // Named subcommands -} - -/** - * Extended factory interface with priority - */ -export interface PrioritizedCommandFactory extends CommandFactory { - readonly priority: FactoryPriority -} diff --git a/cli/src/commands/CommandRegistry.ts b/cli/src/commands/CommandRegistry.ts deleted file mode 100644 index 91d16351..00000000 --- a/cli/src/commands/CommandRegistry.ts +++ /dev/null @@ -1,43 +0,0 @@ -import type {Command} from './Command' -import type {CommandFactory, PrioritizedCommandFactory} from './CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {FactoryPriority} from './CommandFactory' - -/** - * Command registry that manages command factories - * Uses priority-based resolution for factory selection - */ -export class CommandRegistry { - private readonly factories: PrioritizedCommandFactory[] = [] - - register(factory: PrioritizedCommandFactory): void { - this.factories.push(factory) - this.factories.sort((a, b) => a.priority - b.priority) // Sort by priority (lower number = higher priority) - } - - registerWithPriority(factory: CommandFactory, priority: FactoryPriority): void { - const prioritized: PrioritizedCommandFactory = { // Create a wrapper that delegates to the original factory while adding priority - priority, - canHandle: (args: ParsedCliArgs) => factory.canHandle(args), - createCommand: (args: ParsedCliArgs) => factory.createCommand(args) - } - this.factories.push(prioritized) - this.factories.sort((a, b) => a.priority - b.priority) - } - - resolve(args: ParsedCliArgs): Command { - for (const factory of this.factories) { // First pass: check prioritized factories (flags, unknown commands) - if (factory.priority <= FactoryPriority.Unknown && factory.canHandle(args)) return factory.createCommand(args) - } - - for (const factory of this.factories) { // Second pass: check subcommand factories - if (factory.priority === FactoryPriority.Subcommand && factory.canHandle(args)) return factory.createCommand(args) - } - - for (const factory of this.factories) { // Third pass: use catch-all factory (ExecuteCommandFactory) - if (factory.canHandle(args)) return factory.createCommand(args) - } - - throw new Error('No command factory found for the given arguments') // This should never happen if ExecuteCommandFactory is registered - } -} diff --git a/cli/src/commands/CommandUtils.ts b/cli/src/commands/CommandUtils.ts deleted file mode 100644 index a1b522f9..00000000 --- a/cli/src/commands/CommandUtils.ts +++ /dev/null @@ -1,70 +0,0 @@ -/** - * Result summary from aggregating plugin outputs - */ -export interface AggregatedResults { - readonly totalFiles: number - readonly totalDirs: number -} - -/** - * Aggregate file and directory counts from plugin results. - * - * @param results - Map of plugin name to their write results - * @returns Aggregated counts of files and directories - */ -export function aggregatePluginResults( - results: Map -): AggregatedResults { - let totalFiles = 0 - let totalDirs = 0 - - for (const result of results.values()) { - totalFiles += result.files.length - totalDirs += result.dirs.length - } - - return {totalFiles, totalDirs} -} - -/** - * Create a standard CommandResult object. - * Centralizes the result object creation pattern used across commands. - * - * @param success - Whether the command succeeded - * @param filesAffected - Number of files affected - * @param dirsAffected - Number of directories affected - * @param message - Optional message - */ -export function createCommandResult( - success: boolean, - filesAffected: number, - dirsAffected: number, - message?: string -): {success: boolean, filesAffected: number, dirsAffected: number, message?: string} { - return message != null - ? {success, filesAffected, dirsAffected, message} - : {success, filesAffected, dirsAffected} -} - -/** - * Log plugin results with a consistent format. - * - * @param results - Map of plugin name to their results - * @param logger - Logger instance for output - * @param logger.info - Logger info method - * @param dryRun - Whether this is a dry-run execution - */ -export function logPluginResults( - results: Map, - logger: {info: (msg: string, meta?: object) => void}, - dryRun: boolean = false -): void { - for (const [pluginName, result] of results) { - logger.info('plugin result', { - plugin: pluginName, - files: result.files.length, - dirs: result.dirs.length, - ...dryRun && {dryRun: true} - }) - } -} diff --git a/cli/src/commands/ConfigCommand.ts b/cli/src/commands/ConfigCommand.ts deleted file mode 100644 index 68b10277..00000000 --- a/cli/src/commands/ConfigCommand.ts +++ /dev/null @@ -1,237 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import type {AindexConfigKeyPath} from '@/plugins/plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' -import {AINDEX_CONFIG_KEY_PATHS} from '@/plugins/plugin-core' -import {getRequiredGlobalConfigPath} from '@/runtime-environment' - -/** - * Valid configuration keys that can be set via `tnmsc config key=value`. - * Nested keys use dot-notation: aindex.skills.src, aindex.commands.src, etc. - */ -type ValidConfigKey = 'workspaceDir' | 'logLevel' | AindexConfigKeyPath - -const VALID_CONFIG_KEYS: readonly ValidConfigKey[] = [ - 'workspaceDir', - ...AINDEX_CONFIG_KEY_PATHS, - 'logLevel' -] - -/** - * Validate if a key is a valid config key - */ -function isValidConfigKey(key: string): key is ValidConfigKey { - return VALID_CONFIG_KEYS.includes(key as ValidConfigKey) -} - -/** - * Validate log level value - */ -function isValidLogLevel(value: string): boolean { - const validLevels = ['trace', 'debug', 'info', 'warn', 'error'] - return validLevels.includes(value) -} - -/** - * Get global config file path - */ -function getGlobalConfigPath(): string { - return getRequiredGlobalConfigPath() -} - -/** - * Read global config file - */ -function readGlobalConfig(): ConfigObject { - const configPath = getGlobalConfigPath() - if (!fs.existsSync(configPath)) return {} - try { - const content = fs.readFileSync(configPath, 'utf8') - return JSON.parse(content) as ConfigObject - } - catch { - return {} - } -} - -/** - * Write global config file - */ -function writeGlobalConfig(config: ConfigObject): void { - const configPath = getGlobalConfigPath() - const configDir = path.dirname(configPath) - - if (!fs.existsSync(configDir)) fs.mkdirSync(configDir, {recursive: true}) // Ensure directory exists - - fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, 'utf8') // Write with pretty formatting -} - -type ConfigValue = string | ConfigObject -interface ConfigObject { - [key: string]: ConfigValue | undefined -} - -/** - * Set a nested value in an object using dot-notation key - */ -function setNestedValue(obj: ConfigObject, key: string, value: string): void { - const parts = key.split('.') - let current: ConfigObject = obj - for (let i = 0; i < parts.length - 1; i++) { - const part = parts[i] - if (part == null) continue - const next = current[part] - if (typeof next !== 'object' || next === null || Array.isArray(next)) current[part] = {} - current = current[part] as ConfigObject - } - - const lastPart = parts.at(-1) - if (lastPart == null) return - current[lastPart] = value -} - -/** - * Get a nested value from an object using dot-notation key - */ -function getNestedValue(obj: ConfigObject, key: string): ConfigValue | undefined { - const parts = key.split('.') - let current: ConfigValue | undefined = obj - for (const part of parts) { - if (typeof current !== 'object' || current === null || Array.isArray(current)) return void 0 - current = current[part] - } - return current -} - -export class ConfigCommand implements Command { - readonly name = 'config' - - constructor( - private readonly options: readonly [key: string, value: string][] - ) { } - - async execute(ctx: CommandContext): Promise { - const {logger} = ctx - - if (this.options.length === 0) { - logger.error(buildUsageDiagnostic({ - code: 'CONFIG_COMMAND_ARGUMENTS_MISSING', - title: 'Config command requires at least one key=value pair', - rootCause: diagnosticLines( - 'tnmsc config was invoked without any configuration assignments.' - ), - exactFix: diagnosticLines( - 'Run `tnmsc config key=value` with at least one supported configuration key.' - ), - possibleFixes: [ - diagnosticLines(`Use one of the supported keys: ${VALID_CONFIG_KEYS.join(', ')}`) - ], - details: { - validKeys: [...VALID_CONFIG_KEYS] - } - })) - logger.info('Usage: tnmsc config key=value') - logger.info(`Valid keys: ${VALID_CONFIG_KEYS.join(', ')}`) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: 'No options provided' - } - } - - let config: ConfigObject - - try { - config = readGlobalConfig() - } - catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: errorMessage - } - } - - const errors: string[] = [] - const updated: string[] = [] - - for (const [key, value] of this.options) { // Process each key-value pair - if (!isValidConfigKey(key)) { - errors.push(`Invalid key: ${key}`) - logger.error(buildUsageDiagnostic({ - code: 'CONFIG_COMMAND_KEY_INVALID', - title: `Unsupported config key: ${key}`, - rootCause: diagnosticLines( - `The config command received "${key}", which is not a supported configuration key.` - ), - exactFix: diagnosticLines('Use one of the supported config keys and rerun the command.'), - possibleFixes: [ - diagnosticLines(`Supported keys: ${VALID_CONFIG_KEYS.join(', ')}`) - ], - details: { - key, - validKeys: [...VALID_CONFIG_KEYS] - } - })) - continue - } - - if (key === 'logLevel' && !isValidLogLevel(value)) { // Special validation for logLevel - errors.push(`Invalid logLevel value: ${value}`) - logger.error(buildUsageDiagnostic({ - code: 'CONFIG_COMMAND_LOG_LEVEL_INVALID', - title: `Unsupported logLevel value: ${value}`, - rootCause: diagnosticLines( - `The config command received "${value}" for logLevel, but tnmsc does not support that level.` - ), - exactFix: diagnosticLines('Set logLevel to one of: trace, debug, info, warn, or error.'), - details: { - key, - value, - validLevels: ['trace', 'debug', 'info', 'warn', 'error'] - } - })) - continue - } - - const oldValue = getNestedValue(config, key) // Update config - setNestedValue(config, key, value) - - if (oldValue !== value) updated.push(`${key}=${value}`) - - logger.info('configuration updated', {key, value}) - } - - if (updated.length > 0) { // Write config if there are valid updates - try { - writeGlobalConfig(config) - } - catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: errorMessage - } - } - logger.info('global config written', {path: getGlobalConfigPath()}) - } - - const success = errors.length === 0 - const message = success - ? `Configuration updated: ${updated.join(', ')}` - : `Partial update: ${updated.join(', ')}. Errors: ${errors.join(', ')}` - - return { - success, - filesAffected: updated.length > 0 ? 1 : 0, - dirsAffected: 0, - message - } - } -} diff --git a/cli/src/commands/ConfigShowCommand.ts b/cli/src/commands/ConfigShowCommand.ts deleted file mode 100644 index 2a21822a..00000000 --- a/cli/src/commands/ConfigShowCommand.ts +++ /dev/null @@ -1,48 +0,0 @@ -import type {Command, CommandContext, CommandResult, ConfigSource, JsonConfigInfo} from './Command' -import process from 'node:process' -import {ConfigLoader} from '@/ConfigLoader' - -/** - * Command that outputs the current merged configuration and its source layers as JSON. - * - * Invoked via `tnmsc config --show --json`. - * Writes a `JsonConfigInfo` object to stdout containing: - * - `merged`: the final merged UserConfigFile - * - `sources`: an array of ConfigSource entries describing each layer - * - * When used without `--json`, logs the config info via the logger. - */ -export class ConfigShowCommand implements Command { - readonly name = 'config-show' - - async execute(ctx: CommandContext): Promise { - const {logger} = ctx - const loader = new ConfigLoader() - const mergedResult = loader.load() - - const sources: ConfigSource[] = mergedResult.sources.map(sourcePath => { - const loaded = loader.loadFromFile(sourcePath) - return { - path: sourcePath, - layer: 'global', - config: loaded.config - } - }) - - const configInfo: JsonConfigInfo = { - merged: mergedResult.config, - sources - } - - process.stdout.write(`${JSON.stringify(configInfo)}\n`) - - logger.info('config shown', {sources: mergedResult.sources.length}) - - return { - success: true, - filesAffected: 0, - dirsAffected: 0, - message: `Configuration displayed (${sources.length} source(s))` - } - } -} diff --git a/cli/src/commands/DryRunCleanCommand.ts b/cli/src/commands/DryRunCleanCommand.ts deleted file mode 100644 index 72ce58c5..00000000 --- a/cli/src/commands/DryRunCleanCommand.ts +++ /dev/null @@ -1,74 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import * as path from 'node:path' -import {collectAllPluginOutputs} from '../plugins/plugin-core' -import {logProtectedDeletionGuardError} from '../ProtectedDeletionGuard' -import {collectDeletionTargets} from './CleanupUtils' - -/** - * Dry-run clean command - simulates clean operations without actual deletion - */ -export class DryRunCleanCommand implements Command { - readonly name = 'dry-run-clean' - - async execute(ctx: CommandContext): Promise { - const {logger, outputPlugins, createCleanContext} = ctx - logger.info('running clean pipeline', {command: 'dry-run-clean', dryRun: true}) - - const cleanCtx = createCleanContext(true) - const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx) - - logger.info('collected outputs for cleanup', { - dryRun: true, - projectDirs: outputs.projectDirs.length, - projectFiles: outputs.projectFiles.length, - globalDirs: outputs.globalDirs.length, - globalFiles: outputs.globalFiles.length - }) - - const {filesToDelete, dirsToDelete, emptyDirsToDelete, violations, excludedScanGlobs} = await collectDeletionTargets(outputPlugins, cleanCtx) - const totalDirsToDelete = [...dirsToDelete, ...emptyDirsToDelete] - - if (violations.length > 0) { - logProtectedDeletionGuardError(logger, 'dry-run-cleanup', violations) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: `Protected deletion guard blocked cleanup for ${violations.length} path(s)` - } - } - - this.logDryRunFiles(filesToDelete, logger) - this.logDryRunDirectories(totalDirsToDelete, logger) - - logger.info('clean complete', { - dryRun: true, - filesAffected: filesToDelete.length, - dirsAffected: totalDirsToDelete.length, - violations: 0, - excludedScanGlobs - }) - - return { - success: true, - filesAffected: filesToDelete.length, - dirsAffected: totalDirsToDelete.length, - message: 'Dry-run complete, no files were deleted' - } - } - - private logDryRunFiles(files: string[], logger: CommandContext['logger']): void { - for (const file of files) { - const resolved = path.isAbsolute(file) ? file : path.resolve(file) - logger.info('would delete file', {path: resolved, dryRun: true}) - } - } - - private logDryRunDirectories(dirs: string[], logger: CommandContext['logger']): void { - const sortedDirs = [...dirs].sort((a, b) => b.length - a.length) - for (const dir of sortedDirs) { - const resolved = path.isAbsolute(dir) ? dir : path.resolve(dir) - logger.info('would delete directory', {path: resolved, dryRun: true}) - } - } -} diff --git a/cli/src/commands/DryRunOutputCommand.ts b/cli/src/commands/DryRunOutputCommand.ts deleted file mode 100644 index 180501f6..00000000 --- a/cli/src/commands/DryRunOutputCommand.ts +++ /dev/null @@ -1,51 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {syncWindowsConfigIntoWsl} from '@/wsl-mirror-sync' -import { - collectOutputDeclarations, - executeDeclarativeWriteOutputs -} from '../plugins/plugin-core' - -/** - * Dry-run output command - simulates write operations without actual I/O - */ -export class DryRunOutputCommand implements Command { - readonly name = 'dry-run-output' - - async execute(ctx: CommandContext): Promise { - const {logger, outputPlugins, createWriteContext} = ctx - logger.info('started', {command: 'dry-run-output', dryRun: true}) - - const writeCtx = createWriteContext(true) - const predeclaredOutputs = await collectOutputDeclarations(outputPlugins, writeCtx) - const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx, predeclaredOutputs) - - let totalFiles = 0 - let totalDirs = 0 - for (const [pluginName, result] of results) { - totalFiles += result.files.length - totalDirs += result.dirs.length - logger.info('plugin result', {plugin: pluginName, files: result.files.length, dirs: result.dirs.length, dryRun: true}) - } - - const wslMirrorResult = await syncWindowsConfigIntoWsl(outputPlugins, writeCtx, void 0, predeclaredOutputs) - if (wslMirrorResult.errors.length > 0) { - return { - success: false, - filesAffected: totalFiles, - dirsAffected: totalDirs, - message: wslMirrorResult.errors.join('\n') - } - } - - totalFiles += wslMirrorResult.mirroredFiles - - logger.info('complete', {command: 'dry-run-output', totalFiles, totalDirs, dryRun: true}) - - return { - success: true, - filesAffected: totalFiles, - dirsAffected: totalDirs, - message: 'Dry-run complete, no files were written' - } - } -} diff --git a/cli/src/commands/ExecuteCommand.ts b/cli/src/commands/ExecuteCommand.ts deleted file mode 100644 index 8f4c1c96..00000000 --- a/cli/src/commands/ExecuteCommand.ts +++ /dev/null @@ -1,79 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {syncWindowsConfigIntoWsl} from '@/wsl-mirror-sync' -import { - collectOutputDeclarations, - executeDeclarativeWriteOutputs -} from '../plugins/plugin-core' -import {performCleanup} from './CleanupUtils' - -/** - * Execute command - performs actual write operations - * Includes pre-cleanup to remove stale files before writing new outputs - */ -export class ExecuteCommand implements Command { - readonly name = 'execute' - - async execute(ctx: CommandContext): Promise { - const {logger, outputPlugins, createCleanContext, createWriteContext} = ctx - logger.info('started', {command: 'execute'}) - - const writeCtx = createWriteContext(false) - const predeclaredOutputs = await collectOutputDeclarations(outputPlugins, writeCtx) - const cleanCtx = createCleanContext(false) // Step 1: Pre-cleanup (non-dry-run only) - const cleanupResult = await performCleanup(outputPlugins, cleanCtx, logger, predeclaredOutputs) - - if (cleanupResult.violations.length > 0 || cleanupResult.conflicts.length > 0) { - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - ...cleanupResult.message != null ? {message: cleanupResult.message} : {} - } - } - - logger.info('cleanup complete', {deletedFiles: cleanupResult.deletedFiles, deletedDirs: cleanupResult.deletedDirs}) - - const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx, predeclaredOutputs) // Step 2: Write outputs - - let totalFiles = 0 - let totalDirs = 0 - const writeErrors: string[] = [] - for (const result of results.values()) { - totalFiles += result.files.length - totalDirs += result.dirs.length - for (const fileResult of result.files) { - if (!fileResult.success) writeErrors.push(fileResult.error?.message ?? `Failed to write ${fileResult.path}`) - } - } - - if (writeErrors.length > 0) { - return { - success: false, - filesAffected: totalFiles, - dirsAffected: totalDirs, - message: writeErrors.join('\n') - } - } - - const wslMirrorResult = await syncWindowsConfigIntoWsl(outputPlugins, writeCtx, void 0, predeclaredOutputs) - - if (wslMirrorResult.errors.length > 0) { - return { - success: false, - filesAffected: totalFiles, - dirsAffected: totalDirs, - message: wslMirrorResult.errors.join('\n') - } - } - - totalFiles += wslMirrorResult.mirroredFiles - - logger.info('complete', {command: 'execute', pluginCount: results.size}) - - return { - success: true, - filesAffected: totalFiles, - dirsAffected: totalDirs - } - } -} diff --git a/cli/src/commands/HelpCommand.ts b/cli/src/commands/HelpCommand.ts deleted file mode 100644 index ae7201d1..00000000 --- a/cli/src/commands/HelpCommand.ts +++ /dev/null @@ -1,77 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {AINDEX_CONFIG_KEY_PATHS} from '@/plugins/plugin-core' -import {getCliVersion} from './VersionCommand' - -const CLI_NAME = 'tnmsc' -const CONFIG_KEY_LIST_TEXT = ['workspaceDir', 'logLevel', ...AINDEX_CONFIG_KEY_PATHS].join(',\n ') - -const HELP_TEXT = ` -${CLI_NAME} v${getCliVersion()} - Memory Sync CLI - -Synchronize AI memory and configuration files across projects. - -USAGE: - ${CLI_NAME} Run the sync pipeline (default) - ${CLI_NAME} help Show this help message - ${CLI_NAME} version Show version information - ${CLI_NAME} init Deprecated; no longer initializes aindex - ${CLI_NAME} dry-run Preview what would be written - ${CLI_NAME} clean Remove all generated files - ${CLI_NAME} clean --dry-run Preview what would be cleaned - ${CLI_NAME} config key=value Set configuration value - -SUBCOMMANDS: - help Show this help message - version Show version information - init Deprecated; keep public target-relative definitions manually - dry-run Preview changes without writing files - clean Remove all generated output files and directories - config Set configuration values in global config file (~/.aindex/.tnmsc.json) - -ALIASES: - ${CLI_NAME} --help, ${CLI_NAME} -h Same as '${CLI_NAME} help' - ${CLI_NAME} --version, ${CLI_NAME} -v Same as '${CLI_NAME} version' - ${CLI_NAME} clean -n Same as '${CLI_NAME} clean --dry-run' - ${CLI_NAME} config key=value Set config value in global config file - -LOG LEVEL OPTIONS: - --trace Most verbose output - --debug Debug information - --info Standard information (default) - --warn Warnings only - --error Errors only - -CLEAN OPTIONS: - -n, --dry-run Preview cleanup without removing files - -CONFIG OPTIONS: - key=value Set a configuration value in global config (~/.aindex/.tnmsc.json) - Valid keys: ${CONFIG_KEY_LIST_TEXT} - - Examples: - ${CLI_NAME} config workspaceDir=~/my-project - ${CLI_NAME} config aindex.skills.src=skills - ${CLI_NAME} config logLevel=debug - -CONFIGURATION: - Configure via plugin.config.ts in your project root. - See documentation for detailed configuration options. -`.trim() - -/** - * Help command - displays CLI usage information - */ -export class HelpCommand implements Command { - readonly name = 'help' - - async execute(ctx: CommandContext): Promise { - ctx.logger.info(HELP_TEXT) - - return { - success: true, - filesAffected: 0, - dirsAffected: 0, - message: 'Help displayed' - } - } -} diff --git a/cli/src/commands/InitCommand.test.ts b/cli/src/commands/InitCommand.test.ts deleted file mode 100644 index 3224c8f6..00000000 --- a/cli/src/commands/InitCommand.test.ts +++ /dev/null @@ -1,78 +0,0 @@ -import type {CommandContext} from './Command' -import * as fs from 'node:fs' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger, FilePathKind} from '../plugins/plugin-core' -import {InitCommand} from './InitCommand' - -function createCommandContext(): CommandContext { - const workspaceDir = path.resolve('tmp-init-command') - const userConfigOptions = mergeConfig({workspaceDir}) - - return { - logger: createLogger('InitCommandTest', 'error'), - outputPlugins: [], - userConfigOptions, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - }, - createCleanContext: dryRun => ({ - logger: createLogger('InitCommandTest', 'error'), - fs, - path, - glob, - dryRun, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - } - }) as CommandContext['createCleanContext'] extends (dryRun: boolean) => infer T ? T : never, - createWriteContext: dryRun => ({ - logger: createLogger('InitCommandTest', 'error'), - fs, - path, - glob, - dryRun, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - } - }) as CommandContext['createWriteContext'] extends (dryRun: boolean) => infer T ? T : never - } -} - -describe('init command', () => { - it('returns a deprecation failure without creating files', async () => { - const result = await new InitCommand().execute(createCommandContext()) - - expect(result.success).toBe(false) - expect(result.filesAffected).toBe(0) - expect(result.dirsAffected).toBe(0) - expect(result.message).toContain('deprecated') - expect(result.message).toContain('~/workspace/aindex/public/') - }) -}) diff --git a/cli/src/commands/InitCommand.ts b/cli/src/commands/InitCommand.ts deleted file mode 100644 index 98180fcc..00000000 --- a/cli/src/commands/InitCommand.ts +++ /dev/null @@ -1,36 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' - -const INIT_DEPRECATION_MESSAGE = '`tnmsc init` is deprecated and no longer initializes aindex. Maintain the public target-relative definitions manually under `~/workspace/aindex/public/`.' - -export class InitCommand implements Command { - readonly name = 'init' - - async execute(ctx: CommandContext): Promise { - const {logger} = ctx - - logger.warn(buildUsageDiagnostic({ - code: 'INIT_COMMAND_DEPRECATED', - title: 'The init command is deprecated', - rootCause: diagnosticLines( - '`tnmsc init` no longer initializes aindex content or project definitions.' - ), - exactFix: diagnosticLines( - 'Maintain the target-relative definitions manually under `~/workspace/aindex/public/`.' - ), - possibleFixes: [ - diagnosticLines('Run `tnmsc help` to find a supported replacement command for your workflow.') - ], - details: { - command: 'init' - } - })) - - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: INIT_DEPRECATION_MESSAGE - } - } -} diff --git a/cli/src/commands/JsonOutputCommand.ts b/cli/src/commands/JsonOutputCommand.ts deleted file mode 100644 index 3123e96c..00000000 --- a/cli/src/commands/JsonOutputCommand.ts +++ /dev/null @@ -1,56 +0,0 @@ -import type {Command, CommandContext, CommandResult, JsonCommandResult} from './Command' -import process from 'node:process' -import {partitionBufferedDiagnostics} from '@/diagnostics' -import {clearBufferedDiagnostics, drainBufferedDiagnostics} from '@/plugins/plugin-core' - -/** - * Decorator command that wraps any Command to produce JSON output on stdout. - * - * When the `--json` flag is detected, this wrapper: - * 1. Suppresses all Winston console logging (sets global log level to 'silent') - * 2. Delegates execution to the inner command - * 3. Converts the CommandResult to a JsonCommandResult - * 4. Writes the JSON string to stdout - * - * This ensures clean, parseable JSON output for consumption by - * Tauri sidecar or other external tooling. - */ -export class JsonOutputCommand implements Command { - readonly name: string - private readonly inner: Command - - constructor(inner: Command) { - this.inner = inner - this.name = `json:${inner.name}` - } - - async execute(ctx: CommandContext): Promise { - clearBufferedDiagnostics() - const result = await this.inner.execute(ctx) - const jsonResult = toJsonCommandResult(result, drainBufferedDiagnostics()) - process.stdout.write(`${JSON.stringify(jsonResult)}\n`) - return result - } -} - -/** - * Convert a CommandResult to a JsonCommandResult. - * Maps the base result fields and initialises optional arrays as empty - * when not present, ensuring a consistent JSON shape. - */ -export function toJsonCommandResult( - result: CommandResult, - diagnostics = drainBufferedDiagnostics() -): JsonCommandResult { - const {warnings, errors} = partitionBufferedDiagnostics(diagnostics) - const json: JsonCommandResult = { - success: result.success, - filesAffected: result.filesAffected, - dirsAffected: result.dirsAffected, - ...result.message != null && {message: result.message}, - pluginResults: [], - warnings, - errors - } - return json -} diff --git a/cli/src/commands/PluginsCommand.ts b/cli/src/commands/PluginsCommand.ts deleted file mode 100644 index 8f284a06..00000000 --- a/cli/src/commands/PluginsCommand.ts +++ /dev/null @@ -1,54 +0,0 @@ -import type {Command, CommandContext, CommandResult, JsonPluginInfo} from './Command' -import process from 'node:process' - -/** - * Command that outputs all registered output plugin information as JSON. - * - * Invoked via `tnmsc plugins --json`. - * Writes a `JsonPluginInfo[]` array to stdout containing each output plugin's - * name, description, and dependency list. - * - * When used without `--json`, logs the plugin list via the logger. - */ -export class PluginsCommand implements Command { - readonly name = 'plugins' - - async execute(ctx: CommandContext): Promise { - const {logger, outputPlugins, userConfigOptions} = ctx - - const allPlugins = userConfigOptions.plugins - const pluginInfos: JsonPluginInfo[] = [] - - for (const plugin of allPlugins) { - pluginInfos.push({ - name: plugin.name, - kind: 'Output', - description: plugin.name, - dependencies: [...plugin.dependsOn ?? []] - }) - } - - const registeredNames = new Set(pluginInfos.map(p => p.name)) // (they are registered separately via registerOutputPlugins) // Also include output plugins that may not be in userConfigOptions.plugins - for (const plugin of outputPlugins) { - if (!registeredNames.has(plugin.name)) { - pluginInfos.push({ - name: plugin.name, - kind: 'Output', - description: plugin.name, - dependencies: [...plugin.dependsOn ?? []] - }) - } - } - - process.stdout.write(`${JSON.stringify(pluginInfos)}\n`) - - logger.info('plugins listed', {count: pluginInfos.length}) - - return { - success: true, - filesAffected: 0, - dirsAffected: 0, - message: `Listed ${pluginInfos.length} plugin(s)` - } - } -} diff --git a/cli/src/commands/ProtectedDeletionCommands.test.ts b/cli/src/commands/ProtectedDeletionCommands.test.ts deleted file mode 100644 index 3b431b65..00000000 --- a/cli/src/commands/ProtectedDeletionCommands.test.ts +++ /dev/null @@ -1,277 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin, OutputWriteContext} from '../plugins/plugin-core' -import type {CommandContext} from './Command' -import * as fs from 'node:fs' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it, vi} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger, FilePathKind, PluginKind} from '../plugins/plugin-core' -import {CleanCommand} from './CleanCommand' -import {DryRunCleanCommand} from './DryRunCleanCommand' -import {ExecuteCommand} from './ExecuteCommand' -import {JsonOutputCommand} from './JsonOutputCommand' - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createMockOutputPlugin( - cleanup?: OutputCleanupDeclarations, - convertContent?: OutputPlugin['convertContent'] -): OutputPlugin { - return { - type: PluginKind.Output, - name: 'MockOutputPlugin', - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return [{path: path.join(path.resolve('tmp-workspace-command'), 'project-a', 'AGENTS.md'), source: {}}] - }, - async declareCleanupPaths() { - return cleanup ?? {} - }, - async convertContent(declaration, ctx) { - if (convertContent != null) return convertContent(declaration, ctx) - return 'test' - } - } -} - -function createCommandContext( - outputPlugins: readonly OutputPlugin[], - workspaceDir: string = path.resolve('tmp-workspace-command') -): CommandContext { - const aindexDir = path.join(workspaceDir, 'aindex') - const userConfigOptions = mergeConfig({workspaceDir}) - const collectedOutputContext = { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [{ - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceDir, 'project-a') - } - }] - }, - aindexDir - } - - return { - logger: createMockLogger(), - outputPlugins, - collectedOutputContext, - userConfigOptions, - createCleanContext: (dryRun: boolean): OutputCleanContext => ({ - logger: createMockLogger(), - fs, - path, - glob, - collectedOutputContext, - pluginOptions: userConfigOptions, - dryRun - }), - createWriteContext: (dryRun: boolean): OutputWriteContext => ({ - logger: createMockLogger(), - fs, - path, - glob, - collectedOutputContext, - dryRun, - registeredPluginNames: outputPlugins.map(plugin => plugin.name) - }) - } -} - -describe('protected deletion commands', () => { - it('returns failure for clean and dry-run-clean when cleanup hits a protected path', async () => { - const workspaceDir = path.resolve('tmp-workspace-command') - const plugin = createMockOutputPlugin({ - delete: [{kind: 'directory', path: workspaceDir}] - }) - const ctx = createCommandContext([plugin]) - - await expect(new CleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ - success: false, - message: expect.stringContaining('Protected deletion guard blocked cleanup') - })) - await expect(new DryRunCleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ - success: false, - message: expect.stringContaining('Protected deletion guard blocked cleanup') - })) - }) - - it('returns failure before writes run when execute pre-cleanup hits a protected path', async () => { - const workspaceDir = path.resolve('tmp-workspace-command') - const convertContent = vi.fn(async () => 'should-not-write') - const plugin = createMockOutputPlugin({ - delete: [{kind: 'directory', path: workspaceDir}] - }, convertContent) - const ctx = createCommandContext([plugin]) - - await expect(new ExecuteCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ - success: false, - message: expect.stringContaining('Protected deletion guard blocked cleanup') - })) - expect(convertContent).not.toHaveBeenCalled() - }) - - it('returns failure when an output path conflicts with a cleanup protect declaration', async () => { - const outputPath = path.join(path.resolve('tmp-workspace-command'), 'project-a', 'AGENTS.md') - const plugin = createMockOutputPlugin({ - protect: [{kind: 'file', path: outputPath}] - }) - const ctx = createCommandContext([plugin]) - - await expect(new CleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ - success: false, - message: expect.stringContaining('Cleanup protection conflict') - })) - }) - - it('reuses declared outputs across cleanup and write during execute', async () => { - const workspaceDir = path.resolve('tmp-workspace-command-cached') - const outputPath = path.join(workspaceDir, 'project-a', 'AGENTS.md') - let declareOutputFilesCalls = 0 - const plugin: OutputPlugin = { - type: PluginKind.Output, - name: 'CachedOutputPlugin', - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - declareOutputFilesCalls += 1 - return [{path: outputPath, source: {}}] - }, - async declareCleanupPaths() { - return {} - }, - async convertContent() { - return 'cached-output' - } - } - - fs.rmSync(workspaceDir, {recursive: true, force: true}) - fs.mkdirSync(path.join(workspaceDir, 'project-a'), {recursive: true}) - - try { - const ctx = createCommandContext([plugin], workspaceDir) - const result = await new ExecuteCommand().execute(ctx) - - expect(result.success).toBe(true) - expect(declareOutputFilesCalls).toBe(1) - expect(fs.readFileSync(outputPath, 'utf8')).toBe('cached-output') - } - finally { - fs.rmSync(workspaceDir, {recursive: true, force: true}) - } - }) - - it('includes structured diagnostics in JSON output errors', async () => { - const writeSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) - const command = new JsonOutputCommand({ - name: 'mock', - async execute(ctx) { - ctx.logger.error({ - code: 'MOCK_FAILURE', - title: 'Mock command failed', - rootCause: ['The mock command was forced to fail for JSON output testing.'], - exactFix: ['Update the mock command inputs so it no longer emits the test failure.'] - }) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: 'blocked' - } - } - }) - - try { - await command.execute({ - ...createCommandContext([]), - logger: createLogger('ProtectedDeletionJsonTest', 'silent') - }) - expect(writeSpy).toHaveBeenCalledOnce() - const payload = JSON.parse(String(writeSpy.mock.calls[0]?.[0])) as { - readonly message?: string - readonly warnings: readonly unknown[] - readonly errors: readonly {code: string, title: string, rootCause: readonly string[], copyText: readonly string[]}[] - } - - expect(payload.message).toBe('blocked') - expect(payload.warnings).toEqual([]) - expect(payload.errors).toEqual([ - expect.objectContaining({ - code: 'MOCK_FAILURE', - title: 'Mock command failed', - rootCause: ['The mock command was forced to fail for JSON output testing.'], - copyText: expect.arrayContaining(['[MOCK_FAILURE] Mock command failed']) - }) - ]) - } - finally { - writeSpy.mockRestore() - } - }) - - it('includes workspace empty directories in clean dry-run results', async () => { - const workspaceDir = path.resolve('tmp-workspace-command-dry-run-empty') - const generatedDir = path.join(workspaceDir, 'generated') - const generatedFile = path.join(generatedDir, 'AGENTS.md') - const emptyLeafDir = path.join(workspaceDir, 'scratch', 'empty', 'leaf') - const retainedScratchFile = path.join(workspaceDir, 'scratch', 'keep.md') - const plugin: OutputPlugin = { - type: PluginKind.Output, - name: 'DryRunEmptyDirPlugin', - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return [{path: generatedFile, source: {}}] - }, - async declareCleanupPaths() { - return {} - }, - async convertContent() { - return '' - } - } - - fs.rmSync(workspaceDir, {recursive: true, force: true}) - fs.mkdirSync(generatedDir, {recursive: true}) - fs.mkdirSync(emptyLeafDir, {recursive: true}) - fs.mkdirSync(path.dirname(retainedScratchFile), {recursive: true}) - fs.writeFileSync(generatedFile, '# generated', 'utf8') - fs.writeFileSync(retainedScratchFile, '# keep', 'utf8') - - try { - const ctx = createCommandContext([plugin], workspaceDir) - const result = await new DryRunCleanCommand().execute(ctx) - - expect(result).toEqual(expect.objectContaining({ - success: true, - filesAffected: 1, - dirsAffected: 3 - })) - } - finally { - fs.rmSync(workspaceDir, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/commands/SetCommand.ts b/cli/src/commands/SetCommand.ts deleted file mode 100644 index e69de29b..00000000 diff --git a/cli/src/commands/UnknownCommand.ts b/cli/src/commands/UnknownCommand.ts deleted file mode 100644 index 7a530f42..00000000 --- a/cli/src/commands/UnknownCommand.ts +++ /dev/null @@ -1,34 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' - -/** - * Unknown command - displays error for unrecognized subcommands - */ -export class UnknownCommand implements Command { - readonly name = 'unknown' - - constructor(private readonly unknownCmd: string) { } - - async execute(ctx: CommandContext): Promise { - ctx.logger.error(buildUsageDiagnostic({ - code: 'UNKNOWN_COMMAND', - title: `Unknown tnmsc command: ${this.unknownCmd}`, - rootCause: diagnosticLines(`tnmsc does not recognize the "${this.unknownCmd}" subcommand.`), - exactFix: diagnosticLines('Run `tnmsc help` and invoke one of the supported commands.'), - possibleFixes: [ - diagnosticLines('Check the command spelling and remove unsupported aliases or flags.') - ], - details: { - command: this.unknownCmd - } - })) - ctx.logger.info('run "tnmsc help" for available commands') - - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: `Unknown command: ${this.unknownCmd}` - } - } -} diff --git a/cli/src/commands/VersionCommand.ts b/cli/src/commands/VersionCommand.ts deleted file mode 100644 index 6f03525e..00000000 --- a/cli/src/commands/VersionCommand.ts +++ /dev/null @@ -1,29 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' - -const CLI_NAME = 'tnmsc' - -/** - * Get CLI version from build-time injected constant. - * Falls back to 'unknown' in development mode. - */ -export function getCliVersion(): string { - return typeof __CLI_VERSION__ !== 'undefined' ? __CLI_VERSION__ : 'dev' -} - -/** - * Version command - displays CLI version - */ -export class VersionCommand implements Command { - readonly name = 'version' - - async execute(ctx: CommandContext): Promise { - ctx.logger.info(`${CLI_NAME} v${getCliVersion()}`) - - return { - success: true, - filesAffected: 0, - dirsAffected: 0, - message: 'Version displayed' - } - } -} diff --git a/cli/src/commands/bridge.rs b/cli/src/commands/bridge.rs deleted file mode 100644 index d3d18de0..00000000 --- a/cli/src/commands/bridge.rs +++ /dev/null @@ -1,23 +0,0 @@ -use std::process::ExitCode; - -use crate::bridge::node::run_node_command; - -pub fn execute(json_mode: bool) -> ExitCode { - run_node_command("execute", json_mode, &[]) -} - -pub fn dry_run(json_mode: bool) -> ExitCode { - run_node_command("dry-run", json_mode, &[]) -} - -pub fn clean(json_mode: bool) -> ExitCode { - run_node_command("clean", json_mode, &[]) -} - -pub fn dry_run_clean(json_mode: bool) -> ExitCode { - run_node_command("clean", json_mode, &["--dry-run"]) -} - -pub fn plugins(json_mode: bool) -> ExitCode { - run_node_command("plugins", json_mode, &[]) -} diff --git a/cli/src/commands/config_cmd.rs b/cli/src/commands/config_cmd.rs deleted file mode 100644 index e7eb62b5..00000000 --- a/cli/src/commands/config_cmd.rs +++ /dev/null @@ -1,108 +0,0 @@ -use std::process::ExitCode; - -use crate::diagnostic_helpers::{diagnostic, line, optional_details}; -use serde_json::json; -use tnmsc_logger::create_logger; - -use crate::core::config::{ConfigLoader, get_required_global_config_path}; - -pub fn execute(pairs: &[(String, String)]) -> ExitCode { - let logger = create_logger("config", None); - let result = match ConfigLoader::with_defaults().try_load(std::path::Path::new(".")) { - Ok(result) => result, - Err(error) => { - logger.error(diagnostic( - "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", - "Failed to resolve the global config path", - line("The runtime could not determine which global config file should be updated."), - Some(line( - "Ensure the required global config exists and retry the command.", - )), - None, - optional_details(json!({ "error": error })), - )); - return ExitCode::FAILURE; - } - }; - let mut config = result.config; - - for (key, value) in pairs { - match key.as_str() { - "workspaceDir" => config.workspace_dir = Some(value.clone()), - "logLevel" => config.log_level = Some(value.clone()), - _ => { - logger.warn(diagnostic( - "CONFIG_KEY_UNKNOWN", - "Unknown config key was ignored", - line("The provided config key is not supported by this command."), - Some(line( - "Use one of the supported keys: `workspaceDir`, `logLevel`.", - )), - None, - optional_details(json!({ "key": key })), - )); - } - } - } - - let config_path = match get_required_global_config_path() { - Ok(path) => path, - Err(error) => { - logger.error(diagnostic( - "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", - "Failed to resolve the global config path", - line("The runtime could not determine which global config file should be written."), - Some(line( - "Ensure the required global config exists and retry the command.", - )), - None, - optional_details(json!({ "error": error })), - )); - return ExitCode::FAILURE; - } - }; - match serde_json::to_string_pretty(&config) { - Ok(json) => { - if let Some(parent) = config_path.parent() { - let _ = std::fs::create_dir_all(parent); - } - match std::fs::write(&config_path, &json) { - Ok(()) => { - logger.info( - serde_json::Value::String(format!( - "Config saved to {}", - config_path.display() - )), - None, - ); - ExitCode::SUCCESS - } - Err(e) => { - logger.error(diagnostic( - "CONFIG_WRITE_FAILED", - "Failed to write the global config file", - line("The CLI generated the config JSON but could not write it to disk."), - Some(line("Check that the config path is writable and retry.")), - None, - optional_details(json!({ - "path": config_path.to_string_lossy(), - "error": e.to_string() - })), - )); - ExitCode::FAILURE - } - } - } - Err(e) => { - logger.error(diagnostic( - "CONFIG_SERIALIZATION_FAILED", - "Failed to serialize the config", - line("The config object could not be converted to JSON."), - None, - None, - optional_details(json!({ "error": e.to_string() })), - )); - ExitCode::FAILURE - } - } -} diff --git a/cli/src/commands/config_show.rs b/cli/src/commands/config_show.rs deleted file mode 100644 index 0c9be861..00000000 --- a/cli/src/commands/config_show.rs +++ /dev/null @@ -1,44 +0,0 @@ -use std::process::ExitCode; - -use crate::diagnostic_helpers::{diagnostic, line, optional_details}; -use serde_json::json; -use tnmsc_logger::create_logger; - -use crate::core::config::ConfigLoader; - -pub fn execute() -> ExitCode { - let logger = create_logger("config-show", None); - let result = match ConfigLoader::with_defaults().try_load(std::path::Path::new(".")) { - Ok(result) => result, - Err(error) => { - logger.error(diagnostic( - "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", - "Failed to resolve the global config path", - line("The runtime could not determine which global config file should be shown."), - Some(line( - "Ensure the required global config exists and retry the command.", - )), - None, - optional_details(json!({ "error": error })), - )); - return ExitCode::FAILURE; - } - }; - match serde_json::to_string_pretty(&result.config) { - Ok(json) => { - println!("{json}"); - ExitCode::SUCCESS - } - Err(e) => { - logger.error(diagnostic( - "CONFIG_SERIALIZATION_FAILED", - "Failed to serialize the config", - line("The merged config could not be converted to JSON for display."), - None, - None, - optional_details(json!({ "error": e.to_string() })), - )); - ExitCode::FAILURE - } - } -} diff --git a/cli/src/commands/factories/CleanCommandFactory.ts b/cli/src/commands/factories/CleanCommandFactory.ts deleted file mode 100644 index 017d1025..00000000 --- a/cli/src/commands/factories/CleanCommandFactory.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {CleanCommand} from '../CleanCommand' -import {DryRunCleanCommand} from '../DryRunCleanCommand' - -/** - * Factory for creating CleanCommand or DryRunCleanCommand - * Handles 'clean' subcommand with optional --dry-run flag - */ -export class CleanCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { - return args.subcommand === 'clean' - } - - createCommand(args: ParsedCliArgs): Command { - if (args.dryRun) return new DryRunCleanCommand() - return new CleanCommand() - } -} diff --git a/cli/src/commands/factories/ConfigCommandFactory.ts b/cli/src/commands/factories/ConfigCommandFactory.ts deleted file mode 100644 index bc7b6fe0..00000000 --- a/cli/src/commands/factories/ConfigCommandFactory.ts +++ /dev/null @@ -1,29 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {ConfigCommand} from '../ConfigCommand' -import {ConfigShowCommand} from '../ConfigShowCommand' - -/** - * Factory for creating ConfigCommand or ConfigShowCommand - * Handles 'config' subcommand with --show flag or key=value arguments - */ -export class ConfigCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { - return args.subcommand === 'config' - } - - createCommand(args: ParsedCliArgs): Command { - if (args.showFlag) { // Config --show subcommand - return new ConfigShowCommand() - } - - const parsedPositional: [key: string, value: string][] = [] // Parse positional arguments as key=value pairs - for (const arg of args.positional) { - const eqIndex = arg.indexOf('=') - if (eqIndex > 0) parsedPositional.push([arg.slice(0, eqIndex), arg.slice(eqIndex + 1)]) - } - - return new ConfigCommand([...args.setOption, ...parsedPositional]) - } -} diff --git a/cli/src/commands/factories/DryRunCommandFactory.ts b/cli/src/commands/factories/DryRunCommandFactory.ts deleted file mode 100644 index 232901ea..00000000 --- a/cli/src/commands/factories/DryRunCommandFactory.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {DryRunOutputCommand} from '../DryRunOutputCommand' - -/** - * Factory for creating DryRunOutputCommand - * Handles 'dry-run' subcommand - */ -export class DryRunCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { - return args.subcommand === 'dry-run' - } - - createCommand(args: ParsedCliArgs): Command { - void args - return new DryRunOutputCommand() - } -} diff --git a/cli/src/commands/factories/ExecuteCommandFactory.ts b/cli/src/commands/factories/ExecuteCommandFactory.ts deleted file mode 100644 index d7a6f8dc..00000000 --- a/cli/src/commands/factories/ExecuteCommandFactory.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {ExecuteCommand} from '../ExecuteCommand' - -/** - * Factory for creating ExecuteCommand (default command) - * Handles default execution when no specific subcommand matches - */ -export class ExecuteCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { // This is a catch-all factory with lowest priority - void args - return true - } - - createCommand(args: ParsedCliArgs): Command { - void args - return new ExecuteCommand() - } -} diff --git a/cli/src/commands/factories/HelpCommandFactory.ts b/cli/src/commands/factories/HelpCommandFactory.ts deleted file mode 100644 index 3b4174a5..00000000 --- a/cli/src/commands/factories/HelpCommandFactory.ts +++ /dev/null @@ -1,22 +0,0 @@ -import type {Command} from '../Command' -import type {PrioritizedCommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {FactoryPriority} from '../CommandFactory' -import {HelpCommand} from '../HelpCommand' - -/** - * Factory for creating HelpCommand - * Handles --help flag and 'help' subcommand - */ -export class HelpCommandFactory implements PrioritizedCommandFactory { - readonly priority = FactoryPriority.Flags - - canHandle(args: ParsedCliArgs): boolean { - return args.helpFlag || args.subcommand === 'help' - } - - createCommand(args: ParsedCliArgs): Command { - void args - return new HelpCommand() - } -} diff --git a/cli/src/commands/factories/InitCommandFactory.ts b/cli/src/commands/factories/InitCommandFactory.ts deleted file mode 100644 index 71f55fca..00000000 --- a/cli/src/commands/factories/InitCommandFactory.ts +++ /dev/null @@ -1,15 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {InitCommand} from '../InitCommand' - -export class InitCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { - return args.subcommand === 'init' - } - - createCommand(args: ParsedCliArgs): Command { - void args - return new InitCommand() - } -} diff --git a/cli/src/commands/factories/PluginsCommandFactory.ts b/cli/src/commands/factories/PluginsCommandFactory.ts deleted file mode 100644 index 11b25ecb..00000000 --- a/cli/src/commands/factories/PluginsCommandFactory.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {PluginsCommand} from '../PluginsCommand' - -/** - * Factory for creating PluginsCommand - * Handles 'plugins' subcommand - */ -export class PluginsCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { - return args.subcommand === 'plugins' - } - - createCommand(args: ParsedCliArgs): Command { - void args - return new PluginsCommand() - } -} diff --git a/cli/src/commands/factories/UnknownCommandFactory.ts b/cli/src/commands/factories/UnknownCommandFactory.ts deleted file mode 100644 index 6c97fb62..00000000 --- a/cli/src/commands/factories/UnknownCommandFactory.ts +++ /dev/null @@ -1,22 +0,0 @@ -import type {Command} from '../Command' -import type {PrioritizedCommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {FactoryPriority} from '../CommandFactory' -import {UnknownCommand} from '../UnknownCommand' - -/** - * Factory for creating UnknownCommand - * Handles unknown/invalid subcommands - */ -export class UnknownCommandFactory implements PrioritizedCommandFactory { - readonly priority = FactoryPriority.Unknown - - canHandle(args: ParsedCliArgs): boolean { - return args.unknownCommand != null - } - - createCommand(args: ParsedCliArgs): Command { - if (args.unknownCommand == null) return new UnknownCommand('') - return new UnknownCommand(args.unknownCommand) - } -} diff --git a/cli/src/commands/factories/VersionCommandFactory.ts b/cli/src/commands/factories/VersionCommandFactory.ts deleted file mode 100644 index 95dbc123..00000000 --- a/cli/src/commands/factories/VersionCommandFactory.ts +++ /dev/null @@ -1,22 +0,0 @@ -import type {Command} from '../Command' -import type {PrioritizedCommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {FactoryPriority} from '../CommandFactory' -import {VersionCommand} from '../VersionCommand' - -/** - * Factory for creating VersionCommand - * Handles --version flag and 'version' subcommand - */ -export class VersionCommandFactory implements PrioritizedCommandFactory { - readonly priority = FactoryPriority.Flags - - canHandle(args: ParsedCliArgs): boolean { - return args.versionFlag || args.subcommand === 'version' - } - - createCommand(args: ParsedCliArgs): Command { - void args - return new VersionCommand() - } -} diff --git a/cli/src/commands/help.rs b/cli/src/commands/help.rs deleted file mode 100644 index 94b02bd9..00000000 --- a/cli/src/commands/help.rs +++ /dev/null @@ -1,26 +0,0 @@ -use std::process::ExitCode; - -pub fn execute() -> ExitCode { - println!("tnmsc — Memory Sync CLI"); - println!(); - println!("USAGE:"); - println!(" tnmsc [OPTIONS] [COMMAND]"); - println!(); - println!("COMMANDS:"); - println!(" (default) Sync AI memory and configuration files"); - println!(" dry-run Preview changes without writing files"); - println!(" clean Remove all generated output files"); - println!(" config Set or show configuration values"); - println!(" plugins List all registered plugins"); - println!(" version Show version information"); - println!(" help Show this help message"); - println!(); - println!("OPTIONS:"); - println!(" -j, --json Output results as JSON"); - println!(" --trace Set log level to trace"); - println!(" --debug Set log level to debug"); - println!(" --info Set log level to info"); - println!(" --warn Set log level to warn"); - println!(" --error Set log level to error"); - ExitCode::SUCCESS -} diff --git a/cli/src/commands/mod.rs b/cli/src/commands/mod.rs deleted file mode 100644 index cad337be..00000000 --- a/cli/src/commands/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub mod bridge; -pub mod config_cmd; -pub mod config_show; -pub mod help; -pub mod version; diff --git a/cli/src/commands/version.rs b/cli/src/commands/version.rs deleted file mode 100644 index 8321606a..00000000 --- a/cli/src/commands/version.rs +++ /dev/null @@ -1,6 +0,0 @@ -use std::process::ExitCode; - -pub fn execute() -> ExitCode { - println!("{}", env!("CARGO_PKG_VERSION")); - ExitCode::SUCCESS -} diff --git a/cli/src/config.outputScopes.test.ts b/cli/src/config.outputScopes.test.ts deleted file mode 100644 index a5b9e7ae..00000000 --- a/cli/src/config.outputScopes.test.ts +++ /dev/null @@ -1,45 +0,0 @@ -import {describe, expect, it} from 'vitest' -import {mergeConfig} from './config' - -describe('mergeConfig outputScopes', () => { - it('merges plugin topic overrides deeply', () => { - const merged = mergeConfig( - { - outputScopes: { - plugins: { - CursorOutputPlugin: { - commands: 'global', - skills: ['workspace', 'global'] - } - } - } - }, - { - outputScopes: { - plugins: { - CursorOutputPlugin: { - rules: 'project', - skills: 'project' - }, - OpencodeCLIOutputPlugin: { - mcp: 'global' - } - } - } - } - ) - - expect(merged.outputScopes).toEqual({ - plugins: { - CursorOutputPlugin: { - commands: 'global', - skills: 'project', - rules: 'project' - }, - OpencodeCLIOutputPlugin: { - mcp: 'global' - } - } - }) - }) -}) diff --git a/cli/src/config.plugins-fast-path.test.ts b/cli/src/config.plugins-fast-path.test.ts deleted file mode 100644 index 6dc21219..00000000 --- a/cli/src/config.plugins-fast-path.test.ts +++ /dev/null @@ -1,50 +0,0 @@ -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import {afterEach, describe, expect, it, vi} from 'vitest' - -import {defineConfig} from './config' - -const {collectInputContextMock} = vi.hoisted(() => ({ - collectInputContextMock: vi.fn(async () => { - throw new Error('collectInputContext should not run for plugins fast path') - }) -})) - -vi.mock('./inputs/runtime', async importOriginal => { - const actual = await importOriginal() - - return { - ...actual, - collectInputContext: collectInputContextMock - } -}) - -afterEach(() => { - vi.clearAllMocks() -}) - -describe('defineConfig plugins fast path', () => { - it('skips input collection for plugins runtime commands', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-plugins-fast-path-')) - - try { - const result = await defineConfig({ - loadUserConfig: false, - pipelineArgs: ['node', 'tnmsc', 'plugins', '--json'], - pluginOptions: { - workspaceDir: tempWorkspace, - plugins: [] - } - }) - - expect(collectInputContextMock).not.toHaveBeenCalled() - expect(result.context.workspace.directory.path).toBe(tempWorkspace) - expect(result.context.aindexDir).toBe(path.join(tempWorkspace, 'aindex')) - expect(result.outputPlugins).toEqual([]) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/config.test.ts b/cli/src/config.test.ts deleted file mode 100644 index 2b94ef42..00000000 --- a/cli/src/config.test.ts +++ /dev/null @@ -1,173 +0,0 @@ -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import {afterEach, describe, expect, it, vi} from 'vitest' -import {defineConfig} from './config' -import {WorkspaceInputCapability} from './inputs/input-workspace' - -describe('defineConfig', () => { - const originalHome = process.env.HOME - const originalUserProfile = process.env.USERPROFILE - const originalHomeDrive = process.env.HOMEDRIVE - const originalHomePath = process.env.HOMEPATH - - afterEach(() => { - process.env.HOME = originalHome - process.env.USERPROFILE = originalUserProfile - process.env.HOMEDRIVE = originalHomeDrive - process.env.HOMEPATH = originalHomePath - vi.restoreAllMocks() - }) - - it('loads config only from ~/.aindex/.tnmsc.json', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-workspace-')) - const tempHome = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-home-')) - const globalConfigDir = path.join(tempHome, '.aindex') - const globalConfigPath = path.join(globalConfigDir, '.tnmsc.json') - const localConfigPath = path.join(tempWorkspace, '.tnmsc.json') - - process.env.HOME = tempHome - process.env.USERPROFILE = tempHome - delete process.env.HOMEDRIVE - delete process.env.HOMEPATH - - fs.mkdirSync(globalConfigDir, {recursive: true}) - fs.writeFileSync(globalConfigPath, JSON.stringify({ - workspaceDir: tempWorkspace, - aindex: { - dir: 'aindex', - skills: {src: 'skills', dist: 'dist/skills'}, - commands: {src: 'commands', dist: 'dist/commands'}, - subAgents: {src: 'subagents', dist: 'dist/subagents'}, - rules: {src: 'rules', dist: 'dist/rules'}, - globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, - workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, - app: {src: 'app', dist: 'dist/app'}, - ext: {src: 'ext', dist: 'dist/ext'}, - arch: {src: 'arch', dist: 'dist/arch'}, - softwares: {src: 'softwares', dist: 'dist/softwares'} - }, - logLevel: 'info' - }), 'utf8') - fs.writeFileSync(localConfigPath, JSON.stringify({workspaceDir: '/wrong/workspace', logLevel: 'error'}), 'utf8') - - try { - const result = await defineConfig({cwd: tempWorkspace}) - - expect(result.userConfigOptions.workspaceDir).toBe(tempWorkspace) - expect(result.userConfigOptions.aindex.softwares).toEqual({src: 'softwares', dist: 'dist/softwares'}) - expect(result.context.workspace.directory.path).toBe(tempWorkspace) - expect(result.context.aindexDir).toBe(path.join(tempWorkspace, 'aindex')) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - fs.rmSync(tempHome, {recursive: true, force: true}) - } - }) - - it('passes pipeline args into public proxy resolution', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-public-proxy-command-')) - const aindexDir = path.join(tempWorkspace, 'aindex') - const publicDir = path.join(aindexDir, 'public') - - fs.mkdirSync(path.join(publicDir, 'execute'), {recursive: true}) - fs.mkdirSync(path.join(publicDir, 'dry-run'), {recursive: true}) - fs.writeFileSync(path.join(publicDir, 'proxy.ts'), [ - 'export default (_logicalPath, ctx) => ctx.command === "dry-run"', - ' ? "dry-run/gitignore"', - ' : "execute/gitignore"', - '' - ].join('\n'), 'utf8') - fs.writeFileSync(path.join(publicDir, 'execute', 'gitignore'), 'execute\n', 'utf8') - fs.writeFileSync(path.join(publicDir, 'dry-run', 'gitignore'), 'dry-run\n', 'utf8') - - try { - const result = await defineConfig({ - loadUserConfig: false, - pipelineArgs: ['node', 'tnmsc', 'dry-run'], - pluginOptions: { - workspaceDir: tempWorkspace - } - }) - - expect(result.context.globalGitIgnore).toBe('dry-run\n') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('does not run builtin mutating input effects when plugins is explicitly empty', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-explicit-empty-plugins-')) - const orphanSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'orphan-skill') - const orphanSkillFile = path.join(orphanSkillDir, 'SKILL.md') - - fs.mkdirSync(orphanSkillDir, {recursive: true}) - fs.writeFileSync(orphanSkillFile, 'orphan\n', 'utf8') - - try { - const result = await defineConfig({ - loadUserConfig: false, - pluginOptions: { - workspaceDir: tempWorkspace, - plugins: [] - } - }) - - expect(result.context.workspace.directory.path).toBe(tempWorkspace) - expect(fs.existsSync(orphanSkillFile)).toBe(true) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('does not run builtin mutating input effects when shorthand plugins is explicitly empty', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-shorthand-empty-plugins-')) - const tempHome = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-shorthand-empty-home-')) - const orphanSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'orphan-skill') - const orphanSkillFile = path.join(orphanSkillDir, 'SKILL.md') - - process.env.HOME = tempHome - process.env.USERPROFILE = tempHome - delete process.env.HOMEDRIVE - delete process.env.HOMEPATH - - fs.mkdirSync(orphanSkillDir, {recursive: true}) - fs.writeFileSync(orphanSkillFile, 'orphan\n', 'utf8') - - try { - const result = await defineConfig({ - workspaceDir: tempWorkspace, - plugins: [] - }) - - expect(result.context.workspace.directory.path).toBe(tempWorkspace) - expect(fs.existsSync(orphanSkillFile)).toBe(true) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - fs.rmSync(tempHome, {recursive: true, force: true}) - } - }) - - it('accepts legacy input capabilities in pluginOptions.plugins without crashing', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-legacy-input-capabilities-')) - - try { - const result = await defineConfig({ - loadUserConfig: false, - pluginOptions: { - workspaceDir: tempWorkspace, - plugins: [new WorkspaceInputCapability()] - } - }) - - expect(result.context.workspace.directory.path).toBe(tempWorkspace) - expect(result.outputPlugins).toEqual([]) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/config.ts b/cli/src/config.ts deleted file mode 100644 index eff2e696..00000000 --- a/cli/src/config.ts +++ /dev/null @@ -1,475 +0,0 @@ -import type { - AindexConfig, - CleanupProtectionOptions, - CommandSeriesOptions, - CommandSeriesPluginOverride, - ConfigLoaderOptions, - InputCapability, - InputCollectedContext, - OutputCollectedContext, - OutputPlugin, - OutputScopeOptions, - PluginOptions, - PluginOutputScopeTopics, - UserConfigFile, - WindowsOptions -} from './plugins/plugin-core' -import * as path from 'node:path' -import {createLogger} from '@truenine/logger' -import {checkVersionControl} from './Aindex' -import {getConfigLoader} from './ConfigLoader' -import {collectInputContext, resolveRuntimeCommand} from './inputs/runtime' -import { - buildDefaultAindexConfig, - FilePathKind, - mergeAindexConfig, - PathPlaceholders, - toOutputCollectedContext, - validateOutputScopeOverridesForPlugins -} from './plugins/plugin-core' -import {resolveUserPath} from './runtime-environment' - -/** - * Pipeline configuration containing collected context and output plugins - */ -export interface PipelineConfig { - readonly context: OutputCollectedContext - readonly outputPlugins: readonly OutputPlugin[] - readonly userConfigOptions: Required -} - -interface ResolvedPluginSetup { - readonly mergedOptions: Required - readonly outputPlugins: readonly OutputPlugin[] - readonly inputCapabilities: readonly InputCapability[] - readonly userConfigFile?: UserConfigFile -} - -function isOutputPlugin(plugin: InputCapability | OutputPlugin): plugin is OutputPlugin { - return 'declarativeOutput' in plugin -} - -function isInputCapability(plugin: InputCapability | OutputPlugin): plugin is InputCapability { - return 'collect' in plugin && !isOutputPlugin(plugin) -} - -const DEFAULT_AINDEX: Required = buildDefaultAindexConfig() - -const DEFAULT_OPTIONS: Required = { - version: '0.0.0', - workspaceDir: '~/project', - logLevel: 'info', - aindex: DEFAULT_AINDEX, - commandSeriesOptions: {}, - outputScopes: {}, - frontMatter: { - blankLineAfter: true - }, - cleanupProtection: {}, - windows: {}, - plugins: [] -} - -/** - * Convert UserConfigFile to PluginOptions - * UserConfigFile is the JSON schema, PluginOptions includes plugins - */ -export function userConfigToPluginOptions(userConfig: UserConfigFile): Partial { - return { - ...userConfig.version != null ? {version: userConfig.version} : {}, - ...userConfig.workspaceDir != null ? {workspaceDir: userConfig.workspaceDir} : {}, - ...userConfig.aindex != null ? {aindex: userConfig.aindex} : {}, - ...userConfig.commandSeriesOptions != null ? {commandSeriesOptions: userConfig.commandSeriesOptions} : {}, - ...userConfig.outputScopes != null ? {outputScopes: userConfig.outputScopes} : {}, - ...userConfig.frontMatter != null ? {frontMatter: userConfig.frontMatter} : {}, - ...userConfig.cleanupProtection != null ? {cleanupProtection: userConfig.cleanupProtection} : {}, - ...userConfig.windows != null ? {windows: userConfig.windows} : {}, - ...userConfig.logLevel != null ? {logLevel: userConfig.logLevel} : {} - } -} - -/** - * Options for defineConfig - */ -export interface DefineConfigOptions { - readonly pluginOptions?: PluginOptions - - readonly configLoaderOptions?: ConfigLoaderOptions - - readonly loadUserConfig?: boolean - - readonly cwd?: string - - readonly pipelineArgs?: readonly string[] -} - -/** - * Merge multiple PluginOptions with default configuration. - * Later options override earlier ones. - * Similar to vite/vitest mergeConfig. - */ -export function mergeConfig( - ...configs: Partial[] -): Required { - const initialConfig: Required = {...DEFAULT_OPTIONS} - return configs.reduce( - (acc: Required, config) => mergeTwoConfigs(acc, config), - initialConfig - ) -} - -function mergeTwoConfigs( - base: Required, - override: Partial -): Required { - const overridePlugins = override.plugins - const overrideCommandSeries = override.commandSeriesOptions - const overrideOutputScopes = override.outputScopes - const overrideFrontMatter = override.frontMatter - const overrideCleanupProtection = override.cleanupProtection - const overrideWindows = override.windows - - return { - ...base, - ...override, - aindex: mergeAindexConfig(base.aindex, override.aindex), - plugins: [ // Array concatenation for plugins - ...base.plugins, - ...overridePlugins ?? [] - ], - commandSeriesOptions: mergeCommandSeriesOptions(base.commandSeriesOptions, overrideCommandSeries), // Deep merge for commandSeriesOptions - outputScopes: mergeOutputScopeOptions(base.outputScopes, overrideOutputScopes), - frontMatter: mergeFrontMatterOptions(base.frontMatter, overrideFrontMatter), - cleanupProtection: mergeCleanupProtectionOptions(base.cleanupProtection, overrideCleanupProtection), - windows: mergeWindowsOptions(base.windows, overrideWindows) - } -} - -function mergeCommandSeriesOptions( - base?: CommandSeriesOptions, - override?: CommandSeriesOptions -): CommandSeriesOptions { - if (override == null) return base ?? {} - if (base == null) return override - - const mergedPluginOverrides: Record = {} // Merge pluginOverrides deeply - - if (base.pluginOverrides != null) { // Copy base plugin overrides - for (const [key, value] of Object.entries(base.pluginOverrides)) mergedPluginOverrides[key] = {...value} - } - - if (override.pluginOverrides != null) { // Merge override plugin overrides - for (const [key, value] of Object.entries(override.pluginOverrides)) { - mergedPluginOverrides[key] = { - ...mergedPluginOverrides[key], - ...value - } - } - } - - const includeSeriesPrefix = override.includeSeriesPrefix ?? base.includeSeriesPrefix // Build result with conditional properties to satisfy exactOptionalPropertyTypes - const hasPluginOverrides = Object.keys(mergedPluginOverrides).length > 0 - - if (includeSeriesPrefix != null && hasPluginOverrides) return {includeSeriesPrefix, pluginOverrides: mergedPluginOverrides} - if (includeSeriesPrefix != null) return {includeSeriesPrefix} - if (hasPluginOverrides) return {pluginOverrides: mergedPluginOverrides} - return {} -} - -function mergeOutputScopeTopics( - base?: PluginOutputScopeTopics, - override?: PluginOutputScopeTopics -): PluginOutputScopeTopics | undefined { - if (base == null && override == null) return void 0 - if (base == null) return override - if (override == null) return base - return {...base, ...override} -} - -function mergeOutputScopeOptions( - base?: OutputScopeOptions, - override?: OutputScopeOptions -): OutputScopeOptions { - if (override == null) return base ?? {} - if (base == null) return override - - const mergedPlugins: Record = {} - if (base.plugins != null) { - for (const [pluginName, topics] of Object.entries(base.plugins)) { - if (topics != null) mergedPlugins[pluginName] = {...topics} - } - } - if (override.plugins != null) { - for (const [pluginName, topics] of Object.entries(override.plugins)) { - const mergedTopics = mergeOutputScopeTopics(mergedPlugins[pluginName], topics) - if (mergedTopics != null) mergedPlugins[pluginName] = mergedTopics - } - } - - if (Object.keys(mergedPlugins).length === 0) return {} - return {plugins: mergedPlugins} -} - -function mergeFrontMatterOptions( - base: Required['frontMatter'], - override?: PluginOptions['frontMatter'] -): Required['frontMatter'] { - if (override == null) return base - return { - ...base, - ...override - } -} - -function mergeCleanupProtectionOptions( - base?: CleanupProtectionOptions, - override?: CleanupProtectionOptions -): CleanupProtectionOptions { - if (override == null) return base ?? {} - if (base == null) return override - - return { - rules: [ - ...base.rules ?? [], - ...override.rules ?? [] - ] - } -} - -function mergeWindowsOptions( - base?: WindowsOptions, - override?: WindowsOptions -): WindowsOptions { - if (override == null) return base ?? {} - if (base == null) return override - - const baseWsl2 = base.wsl2 - const overrideWsl2 = override.wsl2 - - return { - ...base, - ...override, - ...baseWsl2 != null || overrideWsl2 != null - ? { - wsl2: { - ...baseWsl2, - ...overrideWsl2 - } - } - : {} - } -} - -/** - * Check if options is DefineConfigOptions - */ -function isDefineConfigOptions(options: PluginOptions | DefineConfigOptions): options is DefineConfigOptions { - return 'pluginOptions' in options - || 'configLoaderOptions' in options - || 'loadUserConfig' in options - || 'cwd' in options - || 'pipelineArgs' in options -} - -function getProgrammaticPluginDeclaration( - options: PluginOptions | DefineConfigOptions -): { - readonly hasExplicitProgrammaticPlugins: boolean - readonly explicitProgrammaticPlugins?: PluginOptions['plugins'] -} { - if (isDefineConfigOptions(options)) { - return { - hasExplicitProgrammaticPlugins: Object.hasOwn(options.pluginOptions ?? {}, 'plugins'), - explicitProgrammaticPlugins: options.pluginOptions?.plugins - } - } - - return { - hasExplicitProgrammaticPlugins: Object.hasOwn(options, 'plugins'), - explicitProgrammaticPlugins: options.plugins - } -} - -function resolvePathForMinimalContext(rawPath: string, workspaceDir: string): string { - let resolvedPath = rawPath - - if (resolvedPath.includes(PathPlaceholders.WORKSPACE)) { - resolvedPath = resolvedPath.replace(PathPlaceholders.WORKSPACE, workspaceDir) - } - - return path.normalize(resolveUserPath(resolvedPath)) -} - -function createMinimalOutputCollectedContext( - options: Required -): OutputCollectedContext { - const workspaceDir = resolvePathForMinimalContext(options.workspaceDir, '') - const aindexDir = path.join(workspaceDir, options.aindex.dir) - - return toOutputCollectedContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir) - }, - projects: [] - }, - aindexDir - }) -} - -function shouldUsePluginsFastPath(pipelineArgs?: readonly string[]): boolean { - return resolveRuntimeCommand(pipelineArgs) === 'plugins' -} - -async function resolvePluginSetup( - options: PluginOptions | DefineConfigOptions = {} -): Promise< - ResolvedPluginSetup & { - readonly pipelineArgs?: readonly string[] - readonly userConfigFound: boolean - readonly userConfigSources: readonly string[] - } -> { - let shouldLoadUserConfig: boolean, - cwd: string | undefined, - pluginOptions: PluginOptions, - configLoaderOptions: ConfigLoaderOptions | undefined, - pipelineArgs: readonly string[] | undefined - - if (isDefineConfigOptions(options)) { - ({ - pluginOptions = {}, - cwd, - configLoaderOptions, - pipelineArgs - } = { - pluginOptions: options.pluginOptions, - cwd: options.cwd, - configLoaderOptions: options.configLoaderOptions, - pipelineArgs: options.pipelineArgs - }) - shouldLoadUserConfig = options.loadUserConfig ?? true - } else { - pluginOptions = options - shouldLoadUserConfig = true - configLoaderOptions = void 0 - pipelineArgs = void 0 - } - - let userConfigOptions: Partial = {} - let userConfigFound = false - let userConfigSources: readonly string[] = [] - let userConfigFile: UserConfigFile | undefined - - if (shouldLoadUserConfig) { - try { - const userConfigResult = getConfigLoader(configLoaderOptions).load(cwd) - userConfigFound = userConfigResult.found - userConfigSources = userConfigResult.sources - if (userConfigResult.found) { - userConfigOptions = userConfigToPluginOptions(userConfigResult.config) - userConfigFile = userConfigResult.config - } - } - catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error) - throw new Error(`Failed to load user config: ${errorMessage}`) - } - } - - const mergedOptions = mergeConfig(userConfigOptions, pluginOptions) - const {plugins = [], logLevel} = mergedOptions - const logger = createLogger('defineConfig', logLevel) - - if (userConfigFound) { - logger.info('user config loaded', {sources: userConfigSources}) - } else { - logger.info('no user config found, using defaults/programmatic options', { - workspaceDir: mergedOptions.workspaceDir, - aindexDir: mergedOptions.aindex.dir, - logLevel: mergedOptions.logLevel - }) - } - - const outputPlugins = plugins.filter(isOutputPlugin) - const inputCapabilities = plugins.filter(isInputCapability) - validateOutputScopeOverridesForPlugins(outputPlugins, mergedOptions) - - return { - mergedOptions, - outputPlugins, - inputCapabilities, - ...userConfigFile != null && {userConfigFile}, - ...pipelineArgs != null && {pipelineArgs}, - userConfigFound, - userConfigSources - } -} - -/** - * Define configuration with support for user config files. - * - * Configuration priority (highest to lowest): - * 1. Programmatic options passed to defineConfig - * 2. Global config file (~/.aindex/.tnmsc.json) - * 3. Default values - * - * @param options - Plugin options or DefineConfigOptions - */ -export async function defineConfig(options: PluginOptions | DefineConfigOptions = {}): Promise { - const { - hasExplicitProgrammaticPlugins, - explicitProgrammaticPlugins - } = getProgrammaticPluginDeclaration(options) - const { - mergedOptions, - outputPlugins, - inputCapabilities, - userConfigFile, - pipelineArgs - } = await resolvePluginSetup(options) - const logger = createLogger('defineConfig', mergedOptions.logLevel) - - if (shouldUsePluginsFastPath(pipelineArgs)) { - const context = createMinimalOutputCollectedContext(mergedOptions) - return {context, outputPlugins, userConfigOptions: mergedOptions} - } - - const merged = await collectInputContext({ - userConfigOptions: mergedOptions, - ...inputCapabilities.length > 0 ? {capabilities: inputCapabilities} : {}, - includeBuiltinEffects: !(inputCapabilities.length > 0 || (hasExplicitProgrammaticPlugins && (explicitProgrammaticPlugins?.length ?? 0) === 0)), - ...pipelineArgs != null ? {pipelineArgs} : {}, - ...userConfigFile != null ? {userConfig: userConfigFile} : {} - }) - - if (merged.workspace == null) throw new Error('Workspace not initialized by any plugin') - - const inputContext: InputCollectedContext = { - workspace: merged.workspace, - ...merged.vscodeConfigFiles != null && {vscodeConfigFiles: merged.vscodeConfigFiles}, - ...merged.zedConfigFiles != null && {zedConfigFiles: merged.zedConfigFiles}, - ...merged.jetbrainsConfigFiles != null && {jetbrainsConfigFiles: merged.jetbrainsConfigFiles}, - ...merged.editorConfigFiles != null && {editorConfigFiles: merged.editorConfigFiles}, - ...merged.commands != null && {commands: merged.commands}, - ...merged.subAgents != null && {subAgents: merged.subAgents}, - ...merged.skills != null && {skills: merged.skills}, - ...merged.rules != null && {rules: merged.rules}, - ...merged.globalMemory != null && {globalMemory: merged.globalMemory}, - ...merged.aiAgentIgnoreConfigFiles != null && {aiAgentIgnoreConfigFiles: merged.aiAgentIgnoreConfigFiles}, - ...merged.aindexDir != null && {aindexDir: merged.aindexDir}, - ...merged.readmePrompts != null && {readmePrompts: merged.readmePrompts}, - ...merged.globalGitIgnore != null && {globalGitIgnore: merged.globalGitIgnore}, - ...merged.shadowGitExclude != null && {shadowGitExclude: merged.shadowGitExclude} - } - - const context = toOutputCollectedContext(inputContext) - - if (merged.aindexDir != null) { - checkVersionControl(merged.aindexDir, logger) - } - - return {context, outputPlugins, userConfigOptions: mergedOptions} -} diff --git a/cli/src/core/cleanup.rs b/cli/src/core/cleanup.rs deleted file mode 100644 index 367b79c9..00000000 --- a/cli/src/core/cleanup.rs +++ /dev/null @@ -1,2309 +0,0 @@ -use std::collections::{BTreeSet, HashMap, HashSet}; -use std::env; -use std::fs; -use std::path::{Component, Path, PathBuf}; - -use globset::{Glob, GlobBuilder, GlobSet, GlobSetBuilder}; -use serde::{Deserialize, Serialize}; -use walkdir::WalkDir; - -use crate::core::{config, desk_paths}; - -const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS: [&str; 6] = [ - "**/node_modules/**", - "**/.git/**", - "**/.turbo/**", - "**/.pnpm-store/**", - "**/.yarn/**", - "**/.next/**", -]; - -const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES: [&str; 17] = [ - ".git", - "node_modules", - "dist", - "target", - ".next", - ".turbo", - "coverage", - ".nyc_output", - ".cache", - ".vite", - ".vite-temp", - ".pnpm-store", - ".yarn", - ".idea", - ".vscode", - ".volumes", - "volumes", -]; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum ProtectionModeDto { - Direct, - Recursive, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum ProtectionRuleMatcherDto { - Path, - Glob, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum CleanupTargetKindDto { - File, - Directory, - Glob, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum CleanupErrorKindDto { - File, - Directory, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupTargetDto { - pub path: String, - pub kind: CleanupTargetKindDto, - #[serde(default)] - pub exclude_basenames: Vec, - pub protection_mode: Option, - pub scope: Option, - pub label: Option, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupDeclarationsDto { - #[serde(default)] - pub delete: Vec, - #[serde(default)] - pub protect: Vec, - #[serde(default)] - pub exclude_scan_globs: Vec, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct PluginCleanupSnapshotDto { - pub plugin_name: String, - #[serde(default)] - pub outputs: Vec, - #[serde(default)] - pub cleanup: CleanupDeclarationsDto, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ProtectedRuleDto { - pub path: String, - pub protection_mode: ProtectionModeDto, - pub reason: String, - pub source: String, - pub matcher: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupSnapshot { - pub workspace_dir: String, - pub aindex_dir: Option, - #[serde(default)] - pub project_roots: Vec, - #[serde(default)] - pub protected_rules: Vec, - #[serde(default)] - pub plugin_snapshots: Vec, - /// Glob patterns from aindex.config.ts that should be excluded from - /// the empty-directory scanner (git-style ** patterns supported). - #[serde(default)] - pub empty_dir_exclude_globs: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ProtectedPathViolationDto { - pub target_path: String, - pub protected_path: String, - pub protection_mode: ProtectionModeDto, - pub reason: String, - pub source: String, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupProtectionConflictDto { - pub output_path: String, - pub output_plugin: String, - pub protected_path: String, - pub protection_mode: ProtectionModeDto, - pub protected_by: String, - pub reason: String, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupPlan { - pub files_to_delete: Vec, - pub dirs_to_delete: Vec, - pub empty_dirs_to_delete: Vec, - pub violations: Vec, - pub conflicts: Vec, - pub excluded_scan_globs: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupErrorDto { - pub path: String, - pub kind: CleanupErrorKindDto, - pub error: String, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CleanupExecutionResultDto { - pub deleted_files: usize, - pub deleted_dirs: usize, - pub errors: Vec, - pub violations: Vec, - pub conflicts: Vec, - pub files_to_delete: Vec, - pub dirs_to_delete: Vec, - pub empty_dirs_to_delete: Vec, - pub excluded_scan_globs: Vec, -} - -#[derive(Debug, Clone)] -struct CompiledProtectedRule { - path: String, - protection_mode: ProtectionModeDto, - reason: String, - source: String, - comparison_keys: Vec, - normalized_path: String, - specificity: usize, -} - -#[derive(Debug, Clone)] -struct ProtectedDeletionGuard { - compiled_rules: Vec, -} - -struct PartitionResult { - safe_paths: Vec, - violations: Vec, -} - -fn resolve_home_dir() -> PathBuf { - let runtime_environment = config::resolve_runtime_environment(); - runtime_environment - .effective_home_dir - .or(runtime_environment.native_home_dir) - .unwrap_or_else(|| PathBuf::from("/")) -} - -fn expand_home_path(raw_path: &str) -> PathBuf { - if raw_path == "~" || raw_path.starts_with("~/") || raw_path.starts_with("~\\") { - return config::resolve_tilde(raw_path); - } - PathBuf::from(raw_path) -} - -fn normalize_path(path: &Path) -> PathBuf { - let mut normalized = PathBuf::new(); - - for component in path.components() { - match component { - Component::Prefix(prefix) => normalized.push(prefix.as_os_str()), - Component::RootDir => normalized.push(Path::new(std::path::MAIN_SEPARATOR_STR)), - Component::CurDir => {} - Component::ParentDir => { - let popped = normalized.pop(); - if !popped && !path.is_absolute() { - normalized.push(".."); - } - } - Component::Normal(segment) => normalized.push(segment), - } - } - - if normalized.as_os_str().is_empty() { - if path.is_absolute() { - return PathBuf::from(std::path::MAIN_SEPARATOR_STR); - } - return PathBuf::from("."); - } - - normalized -} - -fn resolve_absolute_path(raw_path: &str) -> PathBuf { - let expanded = expand_home_path(raw_path); - let candidate = if expanded.is_absolute() { - expanded - } else { - env::current_dir() - .unwrap_or_else(|_| PathBuf::from(".")) - .join(expanded) - }; - - normalize_path(&candidate) -} - -fn path_to_string(path: &Path) -> String { - normalize_path(path).to_string_lossy().into_owned() -} - -fn path_to_glob_string(path: &Path) -> String { - path_to_string(path).replace('\\', "/") -} - -fn normalize_glob_pattern(pattern: &str) -> String { - path_to_glob_string(&resolve_absolute_path(pattern)) -} - -fn normalize_relative_glob_pattern(pattern: &str) -> String { - let normalized = pattern.replace('\\', "/"); - let normalized = normalized.trim_start_matches("./"); - normalized.trim_start_matches('/').to_string() -} - -fn normalize_workspace_relative_path(path: &Path, workspace_dir: &Path) -> Option { - let relative = path.strip_prefix(workspace_dir).ok()?; - let relative = path_to_glob_string(relative); - Some(relative.trim_start_matches('/').to_string()) -} - -fn normalize_for_comparison(raw_path: &str) -> String { - let normalized = path_to_string(&resolve_absolute_path(raw_path)); - if cfg!(windows) { - normalized.to_lowercase() - } else { - normalized - } -} - -fn build_comparison_keys(raw_path: &str) -> Vec { - let absolute = resolve_absolute_path(raw_path); - let mut keys = HashSet::from([normalize_for_comparison(&path_to_string(&absolute))]); - - if let Ok(real_path) = fs::canonicalize(&absolute) { - keys.insert(normalize_for_comparison(&path_to_string(&real_path))); - } - - let mut collected = keys.into_iter().collect::>(); - collected.sort(); - collected -} - -fn is_same_or_child_path(candidate: &str, parent: &str) -> bool { - if candidate == parent { - return true; - } - - let separator = std::path::MAIN_SEPARATOR; - let prefix = if parent.ends_with(separator) { - parent.to_string() - } else { - format!("{parent}{separator}") - }; - - candidate.starts_with(&prefix) -} - -fn create_protected_rule( - raw_path: &str, - protection_mode: ProtectionModeDto, - reason: impl Into, - source: impl Into, - matcher: Option, -) -> ProtectedRuleDto { - ProtectedRuleDto { - path: path_to_string(&resolve_absolute_path(raw_path)), - protection_mode, - reason: reason.into(), - source: source.into(), - matcher, - } -} - -fn compile_rule(rule: &ProtectedRuleDto) -> CompiledProtectedRule { - let normalized_path = normalize_for_comparison(&rule.path); - CompiledProtectedRule { - path: path_to_string(&resolve_absolute_path(&rule.path)), - protection_mode: rule.protection_mode, - reason: rule.reason.clone(), - source: rule.source.clone(), - comparison_keys: build_comparison_keys(&rule.path), - specificity: normalized_path - .trim_end_matches(std::path::MAIN_SEPARATOR) - .len(), - normalized_path, - } -} - -fn dedupe_and_compile_rules(rules: &[ProtectedRuleDto]) -> Vec { - let mut compiled_by_key = HashMap::new(); - - for rule in rules { - let compiled = compile_rule(rule); - compiled_by_key.insert( - format!( - "{}:{}", - match compiled.protection_mode { - ProtectionModeDto::Direct => "direct", - ProtectionModeDto::Recursive => "recursive", - }, - compiled.normalized_path - ), - compiled, - ); - } - - let mut compiled = compiled_by_key.into_values().collect::>(); - compiled.sort_by(|a, b| { - b.specificity - .cmp(&a.specificity) - .then_with(|| match (a.protection_mode, b.protection_mode) { - (ProtectionModeDto::Recursive, ProtectionModeDto::Direct) => { - std::cmp::Ordering::Less - } - (ProtectionModeDto::Direct, ProtectionModeDto::Recursive) => { - std::cmp::Ordering::Greater - } - _ => std::cmp::Ordering::Equal, - }) - .then_with(|| a.path.cmp(&b.path)) - }); - compiled -} - -fn glob_builder(pattern: &str) -> Result { - GlobBuilder::new(pattern) - .literal_separator(true) - .backslash_escape(false) - .case_insensitive(cfg!(windows)) - .build() - .map_err(|error| error.to_string()) -} - -fn build_globset(patterns: &[String]) -> Result, String> { - if patterns.is_empty() { - return Ok(None); - } - - let mut builder = GlobSetBuilder::new(); - for pattern in patterns { - builder.add(glob_builder(pattern)?); - } - builder.build().map(Some).map_err(|error| error.to_string()) -} - -fn has_glob_magic(value: &str) -> bool { - value.contains('*') - || value.contains('?') - || value.contains('[') - || value.contains(']') - || value.contains('{') - || value.contains('}') - || value.contains('!') -} - -fn detect_glob_scan_root(pattern: &str) -> PathBuf { - let normalized = pattern.replace('\\', "/"); - if !has_glob_magic(&normalized) { - return resolve_absolute_path(&normalized); - } - - let first_magic_index = normalized - .char_indices() - .find_map(|(index, character)| has_glob_magic(&character.to_string()).then_some(index)) - .unwrap_or(normalized.len()); - - let prefix = normalized[..first_magic_index].trim_end_matches('/'); - if prefix.is_empty() { - return env::current_dir().unwrap_or_else(|_| PathBuf::from(".")); - } - - let scan_root = prefix.rsplit_once('/').map_or(prefix, |(head, _)| { - if head.is_empty() { - if normalized.starts_with('/') { - "/" - } else { - prefix - } - } else { - head - } - }); - - resolve_absolute_path(scan_root) -} - -/// A group of glob patterns that share the same scan root and ignore globs. -/// All patterns in the group are evaluated in a single directory walk. -#[derive(Debug, Clone)] -struct GlobGroup { - scan_root: PathBuf, - pattern_indices: Vec, -} - -/// Metadata associated with each glob pattern for result fan-out. -#[derive(Debug, Clone)] -struct GlobTargetMetadata { - is_protected: bool, - target_index: usize, - exclude_basenames: Vec, -} - -type GlobMatchResults = Vec<(usize, Vec)>; -type BatchedGlobExecutionResult = (GlobMatchResults, GlobMatchResults); - -/// Batched glob planner that groups patterns by scan root and ignore set. -/// This reduces the number of directory walks from O(patterns) to O(unique scan roots). -#[derive(Debug)] -struct BatchedGlobPlanner { - ignore_matcher: Option, - groups: Vec, - normalized_patterns: Vec, - metadata: Vec, -} - -impl BatchedGlobPlanner { - fn new(ignore_globs: &[String]) -> Result { - Ok(Self { - ignore_matcher: build_globset(ignore_globs)?, - groups: Vec::new(), - normalized_patterns: Vec::new(), - metadata: Vec::new(), - }) - } - - /// Add a glob pattern to the planner with its associated metadata. - fn add_pattern( - &mut self, - pattern: &str, - is_protected: bool, - target_index: usize, - exclude_basenames: Vec, - ) { - let normalized = normalize_glob_pattern(pattern); - let pattern_index = self.normalized_patterns.len(); - self.normalized_patterns.push(normalized.clone()); - self.metadata.push(GlobTargetMetadata { - is_protected, - target_index, - exclude_basenames, - }); - - // Non-glob patterns (literal paths) don't need directory scanning - if !has_glob_magic(&normalized) { - return; - } - - let scan_root = detect_glob_scan_root(&normalized); - let scan_root_str = path_to_string(&scan_root); - - // Find or create a group for this scan root - if let Some(group) = self - .groups - .iter_mut() - .find(|g| path_to_string(&g.scan_root) == scan_root_str) - { - group.pattern_indices.push(pattern_index); - } else { - self.groups.push(GlobGroup { - scan_root, - pattern_indices: vec![pattern_index], - }); - } - } - - /// Execute the batched glob expansion and fan results back to targets. - /// Returns (protected_matches, delete_matches) where each is a vec of (target_index, matched_paths). - fn execute(&self) -> Result { - let mut protected_results: HashMap> = HashMap::new(); - let mut delete_results: HashMap> = HashMap::new(); - - // Process literal paths (non-glob patterns) directly - for (pattern_index, pattern) in self.normalized_patterns.iter().enumerate() { - if has_glob_magic(pattern) { - continue; - } - - let absolute_path = resolve_absolute_path(pattern); - if !absolute_path.exists() { - continue; - } - - let candidate = path_to_glob_string(&absolute_path); - if self - .ignore_matcher - .as_ref() - .is_some_and(|compiled| compiled.is_match(&candidate)) - { - continue; - } - - let metadata = &self.metadata[pattern_index]; - let normalized_entry = path_to_string(&absolute_path); - - // Check exclude_basenames for delete targets - if !metadata.is_protected - && !metadata.exclude_basenames.is_empty() - && let Some(basename) = Path::new(&normalized_entry).file_name() - { - let basename_str = basename.to_string_lossy(); - if metadata - .exclude_basenames - .iter() - .any(|excluded| excluded == basename_str.as_ref()) - { - continue; - } - } - - let target_map = if metadata.is_protected { - &mut protected_results - } else { - &mut delete_results - }; - target_map - .entry(metadata.target_index) - .or_default() - .push(normalized_entry); - } - - // Process each group's patterns with a single directory walk - for group in &self.groups { - if !group.scan_root.exists() { - continue; - } - - let group_patterns: Vec = group - .pattern_indices - .iter() - .map(|&idx| self.normalized_patterns[idx].clone()) - .collect(); - - let matcher = build_globset(&group_patterns)? - .ok_or_else(|| "failed to compile cleanup glob batch".to_string())?; - - let walker = WalkDir::new(&group.scan_root) - .follow_links(false) - .into_iter() - .filter_entry(|entry| { - let candidate = path_to_glob_string(entry.path()); - !self - .ignore_matcher - .as_ref() - .is_some_and(|compiled| compiled.is_match(&candidate)) - }); - - for entry in walker { - let Ok(entry) = entry else { - continue; - }; - - let candidate = path_to_glob_string(entry.path()); - let matched_indices = matcher.matches(&candidate); - if matched_indices.is_empty() { - continue; - } - - let normalized_entry = path_to_string(&normalize_path(entry.path())); - - for matched_index in matched_indices { - let pattern_index = group.pattern_indices[matched_index]; - let metadata = &self.metadata[pattern_index]; - - // Check exclude_basenames for delete targets - if !metadata.is_protected - && !metadata.exclude_basenames.is_empty() - && let Some(basename) = Path::new(&normalized_entry).file_name() - { - let basename_str = basename.to_string_lossy(); - if metadata - .exclude_basenames - .iter() - .any(|excluded| excluded == basename_str.as_ref()) - { - continue; - } - } - - let target_map = if metadata.is_protected { - &mut protected_results - } else { - &mut delete_results - }; - target_map - .entry(metadata.target_index) - .or_default() - .push(normalized_entry.clone()); - } - } - } - - // Convert HashMaps to sorted Vecs and deduplicate - let mut protected_vec: Vec<(usize, Vec)> = protected_results - .into_iter() - .map(|(idx, mut paths)| { - paths.sort(); - paths.dedup(); - (idx, paths) - }) - .collect(); - protected_vec.sort_by_key(|(idx, _)| *idx); - - let mut delete_vec: Vec<(usize, Vec)> = delete_results - .into_iter() - .map(|(idx, mut paths)| { - paths.sort(); - paths.dedup(); - (idx, paths) - }) - .collect(); - delete_vec.sort_by_key(|(idx, _)| *idx); - - Ok((protected_vec, delete_vec)) - } -} - -/// Legacy function kept for backward compatibility with expand_protected_rules. -/// Prefer using BatchedGlobPlanner for new code. -fn expand_globs(patterns: &[String], ignore_globs: &[String]) -> Result>, String> { - if patterns.is_empty() { - return Ok(Vec::new()); - } - - let mut planner = BatchedGlobPlanner::new(ignore_globs)?; - for (index, pattern) in patterns.iter().enumerate() { - planner.add_pattern(pattern, false, index, Vec::new()); - } - - let (_, delete_results) = planner.execute()?; - let mut matches_by_pattern = vec![Vec::new(); patterns.len()]; - for (target_index, paths) in delete_results { - matches_by_pattern[target_index] = paths; - } - - Ok(matches_by_pattern) -} - -fn expand_protected_rules(rules: &[ProtectedRuleDto]) -> Result, String> { - let mut expanded = Vec::new(); - let mut glob_rules = Vec::new(); - - for rule in rules { - if !matches!(rule.matcher, Some(ProtectionRuleMatcherDto::Glob)) { - expanded.push(create_protected_rule( - &rule.path, - rule.protection_mode, - rule.reason.clone(), - rule.source.clone(), - None, - )); - continue; - } - glob_rules.push(rule.clone()); - } - - let matched_paths_by_rule = expand_globs( - &glob_rules - .iter() - .map(|rule| rule.path.clone()) - .collect::>(), - &[], - )?; - for (rule, matched_paths) in glob_rules.iter().zip(matched_paths_by_rule) { - for matched_path in matched_paths { - expanded.push(create_protected_rule( - &matched_path, - rule.protection_mode, - rule.reason.clone(), - rule.source.clone(), - None, - )); - } - } - - Ok(expanded) -} - -fn root_path_for(path: &Path) -> PathBuf { - let mut root = PathBuf::new(); - for component in path.components() { - match component { - Component::Prefix(prefix) => root.push(prefix.as_os_str()), - Component::RootDir => { - root.push(Path::new(std::path::MAIN_SEPARATOR_STR)); - break; - } - _ => break, - } - } - if root.as_os_str().is_empty() { - return PathBuf::from(std::path::MAIN_SEPARATOR_STR); - } - root -} - -fn collect_built_in_dangerous_path_rules() -> Vec { - let home_dir = resolve_home_dir(); - let xdg_config_home = env::var("XDG_CONFIG_HOME") - .ok() - .filter(|value| !value.trim().is_empty()) - .map(|value| resolve_absolute_path(&value)) - .unwrap_or_else(|| home_dir.join(".config")); - let xdg_data_home = env::var("XDG_DATA_HOME") - .ok() - .filter(|value| !value.trim().is_empty()) - .map(|value| resolve_absolute_path(&value)) - .unwrap_or_else(|| home_dir.join(".local/share")); - let xdg_state_home = env::var("XDG_STATE_HOME") - .ok() - .filter(|value| !value.trim().is_empty()) - .map(|value| resolve_absolute_path(&value)) - .unwrap_or_else(|| home_dir.join(".local/state")); - let xdg_cache_home = env::var("XDG_CACHE_HOME") - .ok() - .filter(|value| !value.trim().is_empty()) - .map(|value| resolve_absolute_path(&value)) - .unwrap_or_else(|| home_dir.join(".cache")); - - vec![ - create_protected_rule( - &path_to_string(&root_path_for(&home_dir)), - ProtectionModeDto::Direct, - "built-in dangerous root path", - "built-in-dangerous-root", - None, - ), - create_protected_rule( - &path_to_string(&home_dir), - ProtectionModeDto::Direct, - "built-in dangerous home directory", - "built-in-dangerous-root", - None, - ), - create_protected_rule( - &path_to_string(&xdg_config_home), - ProtectionModeDto::Direct, - "built-in dangerous config directory", - "built-in-dangerous-root", - None, - ), - create_protected_rule( - &path_to_string(&xdg_data_home), - ProtectionModeDto::Direct, - "built-in dangerous data directory", - "built-in-dangerous-root", - None, - ), - create_protected_rule( - &path_to_string(&xdg_state_home), - ProtectionModeDto::Direct, - "built-in dangerous state directory", - "built-in-dangerous-root", - None, - ), - create_protected_rule( - &path_to_string(&xdg_cache_home), - ProtectionModeDto::Direct, - "built-in dangerous cache directory", - "built-in-dangerous-root", - None, - ), - create_protected_rule( - &path_to_string(&home_dir.join(".aindex")), - ProtectionModeDto::Direct, - "built-in global aindex directory", - "built-in-dangerous-root", - None, - ), - create_protected_rule( - &path_to_string(&home_dir.join(".aindex/.tnmsc.json")), - ProtectionModeDto::Direct, - "built-in global config file", - "built-in-config", - None, - ), - ] -} - -fn collect_workspace_reserved_rules( - workspace_dir: &str, - project_roots: &[String], - include_reserved_workspace_content_roots: bool, -) -> Vec { - let workspace_dir = path_to_string(&resolve_absolute_path(workspace_dir)); - let mut rules = vec![ - create_protected_rule( - &workspace_dir, - ProtectionModeDto::Direct, - "workspace root", - "workspace-reserved", - None, - ), - create_protected_rule( - &path_to_string(&resolve_absolute_path(&format!("{workspace_dir}/aindex"))), - ProtectionModeDto::Direct, - "reserved workspace aindex root", - "workspace-reserved", - None, - ), - create_protected_rule( - &path_to_string(&resolve_absolute_path(&format!( - "{workspace_dir}/knowladge" - ))), - ProtectionModeDto::Direct, - "reserved workspace knowladge root", - "workspace-reserved", - None, - ), - ]; - - for project_root in project_roots { - rules.push(create_protected_rule( - project_root, - ProtectionModeDto::Direct, - "workspace project root", - "workspace-project-root", - None, - )); - } - - if include_reserved_workspace_content_roots { - rules.push(create_protected_rule( - &format!("{workspace_dir}/aindex/dist/**/*.mdx"), - ProtectionModeDto::Direct, - "reserved workspace aindex dist mdx files", - "workspace-reserved", - Some(ProtectionRuleMatcherDto::Glob), - )); - for series_name in ["app", "ext", "arch", "softwares"] { - rules.push(create_protected_rule( - &format!("{workspace_dir}/aindex/{series_name}/**/*.mdx"), - ProtectionModeDto::Direct, - format!("reserved workspace aindex {series_name} mdx files"), - "workspace-reserved", - Some(ProtectionRuleMatcherDto::Glob), - )); - } - } - - rules -} - -fn create_guard( - snapshot: &CleanupSnapshot, - rules: &[ProtectedRuleDto], -) -> Result { - let mut all_rules = collect_built_in_dangerous_path_rules(); - all_rules.extend(collect_workspace_reserved_rules( - &snapshot.workspace_dir, - &snapshot.project_roots, - true, - )); - - if let Some(aindex_dir) = snapshot.aindex_dir.as_ref() { - all_rules.push(create_protected_rule( - aindex_dir, - ProtectionModeDto::Direct, - "resolved aindex root", - "aindex-root", - None, - )); - } - - all_rules.extend_from_slice(rules); - let compiled_rules = dedupe_and_compile_rules(&expand_protected_rules(&all_rules)?); - - Ok(ProtectedDeletionGuard { compiled_rules }) -} - -fn is_rule_match(target_key: &str, rule_key: &str, protection_mode: ProtectionModeDto) -> bool { - match protection_mode { - ProtectionModeDto::Direct => is_same_or_child_path(rule_key, target_key), - ProtectionModeDto::Recursive => { - is_same_or_child_path(target_key, rule_key) - || is_same_or_child_path(rule_key, target_key) - } - } -} - -fn select_more_specific_rule( - candidate: &CompiledProtectedRule, - current: Option<&CompiledProtectedRule>, -) -> CompiledProtectedRule { - let Some(current) = current else { - return candidate.clone(); - }; - - if candidate.specificity != current.specificity { - return if candidate.specificity > current.specificity { - candidate.clone() - } else { - current.clone() - }; - } - - if candidate.protection_mode != current.protection_mode { - return if candidate.protection_mode == ProtectionModeDto::Recursive { - candidate.clone() - } else { - current.clone() - }; - } - - if candidate.path < current.path { - candidate.clone() - } else { - current.clone() - } -} - -fn get_protected_path_violation( - target_path: &str, - guard: &ProtectedDeletionGuard, -) -> Option { - let absolute_target_path = path_to_string(&resolve_absolute_path(target_path)); - let target_keys = build_comparison_keys(&absolute_target_path); - let mut matched_rule: Option = None; - - for rule in &guard.compiled_rules { - let mut did_match = false; - for target_key in &target_keys { - for rule_key in &rule.comparison_keys { - if !is_rule_match(target_key, rule_key, rule.protection_mode) { - continue; - } - - matched_rule = Some(select_more_specific_rule(rule, matched_rule.as_ref())); - did_match = true; - break; - } - if did_match { - break; - } - } - } - - matched_rule.map(|rule| ProtectedPathViolationDto { - target_path: absolute_target_path, - protected_path: rule.path, - protection_mode: rule.protection_mode, - reason: rule.reason, - source: rule.source, - }) -} - -fn partition_deletion_targets(paths: &[String], guard: &ProtectedDeletionGuard) -> PartitionResult { - let mut safe_paths = Vec::new(); - let mut violations = Vec::new(); - - for target_path in paths { - if let Some(violation) = get_protected_path_violation(target_path, guard) { - violations.push(violation); - } else { - safe_paths.push(path_to_string(&resolve_absolute_path(target_path))); - } - } - - safe_paths.sort(); - violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); - - PartitionResult { - safe_paths, - violations, - } -} - -fn compact_deletion_targets(files: &[String], dirs: &[String]) -> (Vec, Vec) { - let files_by_key = files - .iter() - .map(|file_path| { - let resolved = path_to_string(&resolve_absolute_path(file_path)); - (resolved.clone(), resolved) - }) - .collect::>(); - let dirs_by_key = dirs - .iter() - .map(|dir_path| { - let resolved = path_to_string(&resolve_absolute_path(dir_path)); - (resolved.clone(), resolved) - }) - .collect::>(); - - let mut sorted_dir_entries = dirs_by_key.into_iter().collect::>(); - sorted_dir_entries - .sort_by(|(left_key, _), (right_key, _)| left_key.len().cmp(&right_key.len())); - - let mut compacted_dirs: HashMap = HashMap::new(); - for (dir_key, dir_path) in sorted_dir_entries { - let covered_by_parent = compacted_dirs - .keys() - .any(|existing_parent_key| is_same_or_child_path(&dir_key, existing_parent_key)); - if !covered_by_parent { - compacted_dirs.insert(dir_key, dir_path); - } - } - - let mut compacted_files = Vec::new(); - for (file_key, file_path) in files_by_key { - let covered_by_dir = compacted_dirs - .keys() - .any(|dir_key| is_same_or_child_path(&file_key, dir_key)); - if !covered_by_dir { - compacted_files.push(file_path); - } - } - - compacted_files.sort(); - let mut compacted_dir_paths = compacted_dirs.into_values().collect::>(); - compacted_dir_paths.sort(); - - (compacted_files, compacted_dir_paths) -} - -fn should_skip_empty_directory_tree(workspace_dir: &str, current_dir: &str) -> bool { - if current_dir == workspace_dir { - return false; - } - - Path::new(current_dir) - .file_name() - .and_then(|value| value.to_str()) - .is_some_and(|basename| EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.contains(&basename)) -} - -/// Check if a directory path should be excluded from empty-directory scan -/// because it matches a user-supplied glob from aindex.config.ts. -fn matches_empty_dir_exclude_globs( - dir_path: &Path, - workspace_dir: &Path, - absolute_exclude_set: &Option, - relative_exclude_set: &Option, -) -> bool { - let absolute_match = absolute_exclude_set - .as_ref() - .is_some_and(|globs| globs.is_match(path_to_glob_string(dir_path))); - if absolute_match { - return true; - } - - relative_exclude_set.as_ref().is_some_and(|globs| { - normalize_workspace_relative_path(dir_path, workspace_dir) - .is_some_and(|relative_path| globs.is_match(relative_path)) - }) -} - -fn collect_empty_workspace_directories( - current_dir: &Path, - workspace_dir: &Path, - files_to_delete: &HashSet, - dirs_to_delete: &HashSet, - empty_dirs_to_delete: &mut BTreeSet, - empty_dir_absolute_exclude: &Option, - empty_dir_relative_exclude: &Option, -) -> bool { - let current_dir = normalize_path(current_dir); - let current_dir_string = path_to_string(¤t_dir); - let workspace_dir_string = path_to_string(workspace_dir); - - if dirs_to_delete.contains(¤t_dir_string) { - return true; - } - - if should_skip_empty_directory_tree(&workspace_dir_string, ¤t_dir_string) { - return false; - } - - if matches_empty_dir_exclude_globs( - ¤t_dir, - workspace_dir, - empty_dir_absolute_exclude, - empty_dir_relative_exclude, - ) { - return false; - } - - let Ok(entries) = fs::read_dir(¤t_dir) else { - return false; - }; - - let mut has_retained_entries = false; - - for entry in entries { - let Ok(entry) = entry else { - has_retained_entries = true; - continue; - }; - - let entry_path = normalize_path(&entry.path()); - let entry_string = path_to_string(&entry_path); - - if dirs_to_delete.contains(&entry_string) { - continue; - } - - let Ok(file_type) = entry.file_type() else { - has_retained_entries = true; - continue; - }; - - if file_type.is_dir() { - if should_skip_empty_directory_tree(&workspace_dir_string, &entry_string) { - has_retained_entries = true; - continue; - } - - if matches_empty_dir_exclude_globs( - &entry_path, - workspace_dir, - empty_dir_absolute_exclude, - empty_dir_relative_exclude, - ) { - has_retained_entries = true; - continue; - } - - if collect_empty_workspace_directories( - &entry_path, - workspace_dir, - files_to_delete, - dirs_to_delete, - empty_dirs_to_delete, - empty_dir_absolute_exclude, - empty_dir_relative_exclude, - ) { - empty_dirs_to_delete.insert(entry_string); - continue; - } - - has_retained_entries = true; - continue; - } - - if files_to_delete.contains(&entry_string) { - continue; - } - - has_retained_entries = true; - } - - !has_retained_entries -} - -fn plan_workspace_empty_directory_cleanup( - workspace_dir: &str, - files_to_delete: &[String], - dirs_to_delete: &[String], - guard: &ProtectedDeletionGuard, - empty_dir_absolute_exclude: &Option, - empty_dir_relative_exclude: &Option, -) -> (Vec, Vec) { - let workspace_dir = resolve_absolute_path(workspace_dir); - let files_to_delete = files_to_delete - .iter() - .map(|path| path_to_string(&resolve_absolute_path(path))) - .collect::>(); - let dirs_to_delete = dirs_to_delete - .iter() - .map(|path| path_to_string(&resolve_absolute_path(path))) - .collect::>(); - let mut discovered_empty_dirs = BTreeSet::new(); - - collect_empty_workspace_directories( - &workspace_dir, - &workspace_dir, - &files_to_delete, - &dirs_to_delete, - &mut discovered_empty_dirs, - empty_dir_absolute_exclude, - empty_dir_relative_exclude, - ); - - let mut safe_empty_dirs = Vec::new(); - let mut violations = Vec::new(); - - for empty_dir in discovered_empty_dirs { - if let Some(violation) = get_protected_path_violation(&empty_dir, guard) { - violations.push(violation); - } else { - safe_empty_dirs.push(empty_dir); - } - } - - safe_empty_dirs.sort(); - violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); - - (safe_empty_dirs, violations) -} - -fn detect_cleanup_protection_conflicts( - output_path_owners: &HashMap>, - guard: &ProtectedDeletionGuard, -) -> Vec { - let mut conflicts = Vec::new(); - - for (output_path, output_plugins) in output_path_owners { - let output_keys = build_comparison_keys(output_path) - .into_iter() - .collect::>(); - - for rule in &guard.compiled_rules { - let is_exact_match = rule - .comparison_keys - .iter() - .any(|rule_key| output_keys.contains(rule_key)); - if !is_exact_match { - continue; - } - - for output_plugin in output_plugins { - conflicts.push(CleanupProtectionConflictDto { - output_path: output_path.clone(), - output_plugin: output_plugin.clone(), - protected_path: rule.path.clone(), - protection_mode: rule.protection_mode, - protected_by: rule.source.clone(), - reason: rule.reason.clone(), - }); - } - } - } - - conflicts.sort_by(|a, b| { - a.output_path - .cmp(&b.output_path) - .then_with(|| a.protected_path.cmp(&b.protected_path)) - }); - conflicts -} - -#[derive(Debug, Clone)] -struct ProtectedGlobCleanupTarget { - path: String, - protection_mode: ProtectionModeDto, - reason: String, - source: String, -} - -#[derive(Debug, Clone)] -struct DeleteGlobCleanupTarget { - target: CleanupTargetDto, -} - -fn default_protection_mode_for_target(target: &CleanupTargetDto) -> ProtectionModeDto { - target.protection_mode.unwrap_or(match target.kind { - CleanupTargetKindDto::File => ProtectionModeDto::Direct, - CleanupTargetKindDto::Directory | CleanupTargetKindDto::Glob => { - ProtectionModeDto::Recursive - } - }) -} - -pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { - let mut delete_files = HashSet::new(); - let mut delete_dirs = HashSet::new(); - let mut protected_rules = snapshot.protected_rules.clone(); - let mut exclude_scan_globs = BTreeSet::from_iter( - DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS - .iter() - .map(|value| (*value).to_string()), - ); - let mut output_path_owners = HashMap::>::new(); - let mut protected_glob_targets = Vec::::new(); - let mut delete_glob_targets = Vec::::new(); - - for plugin_snapshot in &snapshot.plugin_snapshots { - for output in &plugin_snapshot.outputs { - let resolved_output_path = path_to_string(&resolve_absolute_path(output)); - delete_files.insert(resolved_output_path.clone()); - output_path_owners - .entry(resolved_output_path) - .or_default() - .push(plugin_snapshot.plugin_name.clone()); - } - - for ignore_glob in &plugin_snapshot.cleanup.exclude_scan_globs { - exclude_scan_globs.insert(normalize_glob_pattern(ignore_glob)); - } - } - - let ignore_globs = exclude_scan_globs.iter().cloned().collect::>(); - - for plugin_snapshot in &snapshot.plugin_snapshots { - for target in &plugin_snapshot.cleanup.protect { - if target.kind == CleanupTargetKindDto::Glob { - protected_glob_targets.push(ProtectedGlobCleanupTarget { - path: target.path.clone(), - protection_mode: default_protection_mode_for_target(target), - reason: target - .label - .as_ref() - .map(|label| format!("plugin cleanup protect declaration ({label})")) - .unwrap_or_else(|| "plugin cleanup protect declaration".to_string()), - source: format!("plugin-cleanup-protect:{}", plugin_snapshot.plugin_name), - }); - continue; - } - - let reason = target - .label - .as_ref() - .map(|label| format!("plugin cleanup protect declaration ({label})")) - .unwrap_or_else(|| "plugin cleanup protect declaration".to_string()); - protected_rules.push(create_protected_rule( - &target.path, - default_protection_mode_for_target(target), - reason, - format!("plugin-cleanup-protect:{}", plugin_snapshot.plugin_name), - None, - )); - } - - for target in &plugin_snapshot.cleanup.delete { - if target.kind == CleanupTargetKindDto::Glob { - delete_glob_targets.push(DeleteGlobCleanupTarget { - target: target.clone(), - }); - continue; - } - - match target.kind { - CleanupTargetKindDto::Directory => { - delete_dirs.insert(path_to_string(&resolve_absolute_path(&target.path))); - } - CleanupTargetKindDto::File => { - delete_files.insert(path_to_string(&resolve_absolute_path(&target.path))); - } - CleanupTargetKindDto::Glob => {} - } - } - } - - // Batch all glob patterns (both protected and delete) into a single planner - // to minimize directory walks. This is the key performance optimization. - let mut planner = BatchedGlobPlanner::new(&ignore_globs)?; - - // Add protected glob targets - for (index, target) in protected_glob_targets.iter().enumerate() { - planner.add_pattern( - &target.path, - true, // is_protected - index, - Vec::new(), // protected globs don't use exclude_basenames - ); - } - - // Add delete glob targets - for (index, target) in delete_glob_targets.iter().enumerate() { - planner.add_pattern( - &target.target.path, - false, // is_delete - index, - target.target.exclude_basenames.clone(), - ); - } - - // Execute the batched glob expansion - let (protected_results, delete_results) = planner.execute()?; - - // Fan protected glob results back to their targets - for (target_index, matched_paths) in protected_results { - let target = &protected_glob_targets[target_index]; - for matched_path in matched_paths { - protected_rules.push(create_protected_rule( - &matched_path, - target.protection_mode, - target.reason.clone(), - target.source.clone(), - None, - )); - } - } - - // Fan delete glob results back to their targets - for (_target_index, matched_paths) in delete_results { - for matched_path in matched_paths { - let Ok(metadata) = fs::symlink_metadata(&matched_path) else { - continue; - }; - if metadata.is_dir() { - delete_dirs.insert(path_to_string(&resolve_absolute_path(&matched_path))); - } else { - delete_files.insert(path_to_string(&resolve_absolute_path(&matched_path))); - } - } - } - - let guard = create_guard(&snapshot, &protected_rules)?; - let conflicts = detect_cleanup_protection_conflicts(&output_path_owners, &guard); - if !conflicts.is_empty() { - return Ok(CleanupPlan { - files_to_delete: Vec::new(), - dirs_to_delete: Vec::new(), - empty_dirs_to_delete: Vec::new(), - violations: Vec::new(), - conflicts, - excluded_scan_globs: ignore_globs, - }); - } - - let file_partition = - partition_deletion_targets(&delete_files.into_iter().collect::>(), &guard); - let dir_partition = - partition_deletion_targets(&delete_dirs.into_iter().collect::>(), &guard); - let (files_to_delete, dirs_to_delete) = - compact_deletion_targets(&file_partition.safe_paths, &dir_partition.safe_paths); - let empty_dir_absolute_exclude_set = build_globset( - &snapshot - .empty_dir_exclude_globs - .iter() - .map(|pattern| { - if expand_home_path(pattern).is_absolute() { - normalize_glob_pattern(pattern) - } else { - path_to_glob_string(&resolve_absolute_path(&format!( - "{}/{}", - snapshot.workspace_dir, pattern - ))) - } - }) - .collect::>(), - )?; - let empty_dir_relative_exclude_set = build_globset( - &snapshot - .empty_dir_exclude_globs - .iter() - .filter(|pattern| !expand_home_path(pattern).is_absolute()) - .map(|pattern| normalize_relative_glob_pattern(pattern)) - .collect::>(), - )?; - let (empty_dirs_to_delete, empty_dir_violations) = plan_workspace_empty_directory_cleanup( - &snapshot.workspace_dir, - &files_to_delete, - &dirs_to_delete, - &guard, - &empty_dir_absolute_exclude_set, - &empty_dir_relative_exclude_set, - ); - - let mut violations = file_partition.violations; - violations.extend(dir_partition.violations); - violations.extend(empty_dir_violations); - violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); - - Ok(CleanupPlan { - files_to_delete, - dirs_to_delete, - empty_dirs_to_delete, - violations, - conflicts: Vec::new(), - excluded_scan_globs: ignore_globs, - }) -} - -pub fn perform_cleanup(snapshot: CleanupSnapshot) -> Result { - let plan = plan_cleanup(snapshot)?; - if !plan.conflicts.is_empty() || !plan.violations.is_empty() { - return Ok(CleanupExecutionResultDto { - deleted_files: 0, - deleted_dirs: 0, - errors: Vec::new(), - violations: plan.violations, - conflicts: plan.conflicts, - files_to_delete: plan.files_to_delete, - dirs_to_delete: plan.dirs_to_delete, - empty_dirs_to_delete: plan.empty_dirs_to_delete, - excluded_scan_globs: plan.excluded_scan_globs, - }); - } - - let delete_result = desk_paths::delete_targets(&plan.files_to_delete, &plan.dirs_to_delete); - let empty_dir_result = desk_paths::delete_empty_directories(&plan.empty_dirs_to_delete); - let mut errors = delete_result - .file_errors - .into_iter() - .map(|error| CleanupErrorDto { - path: error.path, - kind: CleanupErrorKindDto::File, - error: error.error, - }) - .collect::>(); - errors.extend( - delete_result - .dir_errors - .into_iter() - .map(|error| CleanupErrorDto { - path: error.path, - kind: CleanupErrorKindDto::Directory, - error: error.error, - }), - ); - errors.extend( - empty_dir_result - .errors - .into_iter() - .map(|error| CleanupErrorDto { - path: error.path, - kind: CleanupErrorKindDto::Directory, - error: error.error, - }), - ); - - Ok(CleanupExecutionResultDto { - deleted_files: delete_result.deleted_files.len(), - deleted_dirs: delete_result.deleted_dirs.len() + empty_dir_result.deleted_paths.len(), - errors, - violations: Vec::new(), - conflicts: Vec::new(), - files_to_delete: plan.files_to_delete, - dirs_to_delete: plan.dirs_to_delete, - empty_dirs_to_delete: plan.empty_dirs_to_delete, - excluded_scan_globs: plan.excluded_scan_globs, - }) -} - -#[cfg(feature = "napi")] -mod napi_binding { - use napi_derive::napi; - - use super::{CleanupExecutionResultDto, CleanupPlan, CleanupSnapshot}; - - fn parse_snapshot(snapshot_json: String) -> napi::Result { - serde_json::from_str(&snapshot_json) - .map_err(|error| napi::Error::from_reason(error.to_string())) - } - - fn serialize_result(result: &T) -> napi::Result { - serde_json::to_string(result).map_err(|error| napi::Error::from_reason(error.to_string())) - } - - #[napi] - pub fn plan_cleanup(snapshot_json: String) -> napi::Result { - let snapshot = parse_snapshot(snapshot_json)?; - let result: CleanupPlan = - super::plan_cleanup(snapshot).map_err(napi::Error::from_reason)?; - serialize_result(&result) - } - - #[napi] - pub fn perform_cleanup(snapshot_json: String) -> napi::Result { - let snapshot = parse_snapshot(snapshot_json)?; - let result: CleanupExecutionResultDto = - super::perform_cleanup(snapshot).map_err(napi::Error::from_reason)?; - serialize_result(&result) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use tempfile::tempdir; - - fn empty_snapshot(workspace_dir: &Path) -> CleanupSnapshot { - CleanupSnapshot { - workspace_dir: path_to_string(workspace_dir), - aindex_dir: Some(path_to_string(&workspace_dir.join("aindex"))), - project_roots: vec![path_to_string(&workspace_dir.join("project-a"))], - protected_rules: Vec::new(), - plugin_snapshots: Vec::new(), - empty_dir_exclude_globs: Vec::new(), - } - } - - fn single_plugin_snapshot( - workspace_dir: &Path, - outputs: Vec, - cleanup: CleanupDeclarationsDto, - ) -> CleanupSnapshot { - CleanupSnapshot { - plugin_snapshots: vec![PluginCleanupSnapshotDto { - plugin_name: "MockOutputPlugin".to_string(), - outputs, - cleanup, - }], - ..empty_snapshot(workspace_dir) - } - } - - #[test] - fn detects_exact_output_protection_conflicts() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let output_path = workspace_dir.join("project-a/AGENTS.md"); - fs::create_dir_all(output_path.parent().unwrap()).unwrap(); - fs::write(&output_path, "# output").unwrap(); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![path_to_string(&output_path)], - CleanupDeclarationsDto { - protect: vec![CleanupTargetDto { - path: path_to_string(&output_path), - kind: CleanupTargetKindDto::File, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - assert_eq!(plan.conflicts.len(), 1); - assert!(plan.files_to_delete.is_empty()); - assert!(plan.dirs_to_delete.is_empty()); - } - - #[test] - fn expands_delete_globs_and_respects_excluded_basenames() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let skills_dir = workspace_dir.join(".codex/skills"); - let system_dir = skills_dir.join(".system"); - let stale_dir = skills_dir.join("legacy"); - fs::create_dir_all(&system_dir).unwrap(); - fs::create_dir_all(&stale_dir).unwrap(); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![], - CleanupDeclarationsDto { - delete: vec![CleanupTargetDto { - path: path_to_string(&skills_dir.join("*")), - kind: CleanupTargetKindDto::Glob, - exclude_basenames: vec![".system".to_string()], - protection_mode: None, - scope: None, - label: None, - }], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - assert!(plan.dirs_to_delete.contains(&path_to_string(&stale_dir))); - assert!(!plan.dirs_to_delete.contains(&path_to_string(&system_dir))); - } - - #[test] - fn preserves_direct_vs_recursive_guard_behavior() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let direct_dir = workspace_dir.join("project-a"); - let recursive_dir = workspace_dir.join("aindex/dist"); - let direct_file = direct_dir.join("AGENTS.md"); - let recursive_file = recursive_dir.join("commands/demo.mdx"); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![ - path_to_string(&direct_file), - path_to_string(&recursive_file), - ], - CleanupDeclarationsDto { - protect: vec![ - CleanupTargetDto { - path: path_to_string(&direct_dir), - kind: CleanupTargetKindDto::Directory, - exclude_basenames: Vec::new(), - protection_mode: Some(ProtectionModeDto::Direct), - scope: None, - label: None, - }, - CleanupTargetDto { - path: path_to_string(&recursive_dir), - kind: CleanupTargetKindDto::Directory, - exclude_basenames: Vec::new(), - protection_mode: Some(ProtectionModeDto::Recursive), - scope: None, - label: None, - }, - ], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - assert!(plan.files_to_delete.contains(&path_to_string(&direct_file))); - assert!(plan - .violations - .iter() - .any(|violation| violation.target_path == path_to_string(&recursive_file))); - } - - #[test] - fn blocks_reserved_workspace_mdx_descendants() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let protected_file = workspace_dir.join("aindex/dist/commands/demo.mdx"); - fs::create_dir_all(protected_file.parent().unwrap()).unwrap(); - fs::write(&protected_file, "# demo").unwrap(); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![], - CleanupDeclarationsDto { - delete: vec![CleanupTargetDto { - path: path_to_string(&workspace_dir.join("aindex/dist")), - kind: CleanupTargetKindDto::Directory, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - assert!(plan.dirs_to_delete.is_empty()); - assert_eq!(plan.violations.len(), 1); - assert_eq!( - plan.violations[0].protected_path, - path_to_string(&protected_file) - ); - } - - #[cfg(unix)] - #[test] - fn matches_symlink_realpaths_against_protected_paths() { - use std::os::unix::fs::symlink; - - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let symlink_path = temp_dir.path().join("workspace-link"); - fs::create_dir_all(&workspace_dir).unwrap(); - symlink(&workspace_dir, &symlink_path).unwrap(); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![], - CleanupDeclarationsDto { - delete: vec![CleanupTargetDto { - path: path_to_string(&symlink_path), - kind: CleanupTargetKindDto::Directory, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - assert!(plan.dirs_to_delete.is_empty()); - assert!(plan - .violations - .iter() - .any(|violation| violation.target_path == path_to_string(&symlink_path))); - } - - #[test] - fn compacts_nested_directory_targets() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let base_dir = workspace_dir.join(".claude"); - let rules_dir = base_dir.join("rules"); - let rule_file = rules_dir.join("demo.md"); - fs::create_dir_all(&rules_dir).unwrap(); - fs::write(&rule_file, "# demo").unwrap(); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![path_to_string(&rule_file)], - CleanupDeclarationsDto { - delete: vec![ - CleanupTargetDto { - path: path_to_string(&base_dir), - kind: CleanupTargetKindDto::Directory, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }, - CleanupTargetDto { - path: path_to_string(&rules_dir), - kind: CleanupTargetKindDto::Directory, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }, - CleanupTargetDto { - path: path_to_string(&rule_file), - kind: CleanupTargetKindDto::File, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }, - ], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - assert_eq!(plan.dirs_to_delete, vec![path_to_string(&base_dir)]); - assert!(plan.files_to_delete.is_empty()); - } - - #[test] - fn plans_workspace_empty_directories_while_skipping_excluded_trees() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let source_leaf_dir = workspace_dir.join("source/empty/leaf"); - let source_keep_file = workspace_dir.join("source/keep.md"); - let dist_empty_dir = workspace_dir.join("dist/ghost"); - let node_modules_empty_dir = workspace_dir.join("node_modules/pkg/ghost"); - let git_empty_dir = workspace_dir.join(".git/objects/info"); - - fs::create_dir_all(&source_leaf_dir).unwrap(); - fs::create_dir_all(source_keep_file.parent().unwrap()).unwrap(); - fs::create_dir_all(&dist_empty_dir).unwrap(); - fs::create_dir_all(&node_modules_empty_dir).unwrap(); - fs::create_dir_all(&git_empty_dir).unwrap(); - fs::write(&source_keep_file, "# keep").unwrap(); - - let snapshot = - single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default()); - - let plan = plan_cleanup(snapshot).unwrap(); - assert!(plan.files_to_delete.is_empty()); - assert!(plan.dirs_to_delete.is_empty()); - assert_eq!( - plan.empty_dirs_to_delete, - vec![ - path_to_string(&workspace_dir.join("source/empty")), - path_to_string(&source_leaf_dir), - ] - ); - assert!(!plan - .empty_dirs_to_delete - .contains(&path_to_string(&workspace_dir))); - assert!(!plan - .empty_dirs_to_delete - .contains(&path_to_string(&dist_empty_dir))); - assert!(!plan - .empty_dirs_to_delete - .contains(&path_to_string(&node_modules_empty_dir))); - assert!(!plan - .empty_dirs_to_delete - .contains(&path_to_string(&git_empty_dir))); - } - - #[test] - fn performs_cleanup_and_prunes_workspace_empty_directories() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let output_file = workspace_dir.join("generated/AGENTS.md"); - let empty_leaf_dir = workspace_dir.join("scratch/empty/leaf"); - let retained_scratch_file = workspace_dir.join("scratch/keep.md"); - - fs::create_dir_all(output_file.parent().unwrap()).unwrap(); - fs::create_dir_all(&empty_leaf_dir).unwrap(); - fs::create_dir_all(retained_scratch_file.parent().unwrap()).unwrap(); - fs::write(&output_file, "# generated").unwrap(); - fs::write(&retained_scratch_file, "# keep").unwrap(); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![path_to_string(&output_file)], - CleanupDeclarationsDto::default(), - ); - - let result = perform_cleanup(snapshot).unwrap(); - assert_eq!(result.deleted_files, 1); - assert_eq!(result.deleted_dirs, 3); - assert!(result.errors.is_empty()); - assert!(!output_file.exists()); - assert!(!workspace_dir.join("generated").exists()); - assert!(!empty_leaf_dir.exists()); - assert!(!workspace_dir.join("scratch/empty").exists()); - assert!(workspace_dir.join("scratch").exists()); - } - - #[test] - fn preserves_empty_directories_excluded_by_workspace_relative_globs() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let excluded_leaf_dir = workspace_dir.join("volumes/cache/leaf"); - let regular_leaf_dir = workspace_dir.join("scratch/empty/leaf"); - - fs::create_dir_all(&excluded_leaf_dir).unwrap(); - fs::create_dir_all(®ular_leaf_dir).unwrap(); - - let mut snapshot = - single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default()); - snapshot.empty_dir_exclude_globs = vec!["volumes/**".to_string()]; - - let plan = plan_cleanup(snapshot).unwrap(); - assert!(!plan - .empty_dirs_to_delete - .contains(&path_to_string(&workspace_dir.join("volumes/cache")))); - assert!(!plan - .empty_dirs_to_delete - .contains(&path_to_string(&excluded_leaf_dir))); - assert!(plan - .empty_dirs_to_delete - .contains(&path_to_string(&workspace_dir.join("scratch/empty")))); - assert!(plan - .empty_dirs_to_delete - .contains(&path_to_string(®ular_leaf_dir))); - } - - #[test] - fn skips_reserved_volume_trees_during_empty_directory_scan() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let volumes_leaf_dir = workspace_dir.join("volumes/cache/leaf"); - let hidden_volumes_leaf_dir = workspace_dir.join(".volumes/cache/leaf"); - let regular_leaf_dir = workspace_dir.join("scratch/empty/leaf"); - - fs::create_dir_all(&volumes_leaf_dir).unwrap(); - fs::create_dir_all(&hidden_volumes_leaf_dir).unwrap(); - fs::create_dir_all(®ular_leaf_dir).unwrap(); - - let snapshot = - single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default()); - - let plan = plan_cleanup(snapshot).unwrap(); - assert!(!plan - .empty_dirs_to_delete - .contains(&path_to_string(&workspace_dir.join("volumes/cache")))); - assert!(!plan - .empty_dirs_to_delete - .contains(&path_to_string(&volumes_leaf_dir))); - assert!(!plan - .empty_dirs_to_delete - .contains(&path_to_string(&workspace_dir.join(".volumes/cache")))); - assert!(!plan - .empty_dirs_to_delete - .contains(&path_to_string(&hidden_volumes_leaf_dir))); - assert!(plan - .empty_dirs_to_delete - .contains(&path_to_string(&workspace_dir.join("scratch/empty")))); - assert!(plan - .empty_dirs_to_delete - .contains(&path_to_string(®ular_leaf_dir))); - } - - #[test] - fn batched_glob_planner_handles_multiple_globs_sharing_root() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let cache_dir = workspace_dir.join("cache"); - let temp_dir_path = workspace_dir.join("temp"); - let logs_dir = workspace_dir.join("logs"); - - // Create test directories - fs::create_dir_all(cache_dir.join("sub1")).unwrap(); - fs::create_dir_all(cache_dir.join("sub2")).unwrap(); - fs::create_dir_all(temp_dir_path.join("tmp1")).unwrap(); - fs::create_dir_all(logs_dir.join("2024")).unwrap(); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![], - CleanupDeclarationsDto { - delete: vec![ - CleanupTargetDto { - path: path_to_string(&cache_dir.join("*")), - kind: CleanupTargetKindDto::Glob, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: Some("cache-cleanup".to_string()), - }, - CleanupTargetDto { - path: path_to_string(&temp_dir_path.join("*")), - kind: CleanupTargetKindDto::Glob, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: Some("temp-cleanup".to_string()), - }, - ], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - // Should match subdirectories under cache/ and temp/ but not logs/ - assert_eq!(plan.dirs_to_delete.len(), 3); - assert!(plan - .dirs_to_delete - .contains(&path_to_string(&cache_dir.join("sub1")))); - assert!(plan - .dirs_to_delete - .contains(&path_to_string(&cache_dir.join("sub2")))); - assert!(plan - .dirs_to_delete - .contains(&path_to_string(&temp_dir_path.join("tmp1")))); - assert!(!plan - .dirs_to_delete - .contains(&path_to_string(&logs_dir.join("2024")))); - } - - #[test] - fn batched_glob_planner_handles_mixed_protect_and_delete_globs() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let data_dir = workspace_dir.join("data"); - let keep_dir = data_dir.join("keep"); - let delete_dir = data_dir.join("delete"); - - fs::create_dir_all(&keep_dir).unwrap(); - fs::create_dir_all(&delete_dir).unwrap(); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![], - CleanupDeclarationsDto { - delete: vec![CleanupTargetDto { - path: path_to_string(&data_dir.join("*")), - kind: CleanupTargetKindDto::Glob, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }], - protect: vec![CleanupTargetDto { - // Protect the keep_dir itself using Recursive mode to protect its descendants too - path: path_to_string(&keep_dir), - kind: CleanupTargetKindDto::Directory, - exclude_basenames: Vec::new(), - protection_mode: Some(ProtectionModeDto::Recursive), - scope: None, - label: Some("protect-keep".to_string()), - }], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - // delete_dir should be deleted, keep_dir should NOT be deleted (protected by Directory target) - assert!(plan.dirs_to_delete.contains(&path_to_string(&delete_dir))); - assert!(!plan.dirs_to_delete.contains(&path_to_string(&keep_dir))); - // keep_dir is protected, so attempting to delete it is a violation - assert!(plan - .violations - .iter() - .any(|v| v.target_path == path_to_string(&keep_dir))); - } - - #[test] - fn batched_glob_planner_respects_exclude_basenames() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let build_dir = workspace_dir.join("build"); - let release_dir = build_dir.join("release"); - let debug_dir = build_dir.join("debug"); - let keep_dir = build_dir.join(".gitkeep"); - - fs::create_dir_all(&release_dir).unwrap(); - fs::create_dir_all(&debug_dir).unwrap(); - fs::create_dir_all(&keep_dir).unwrap(); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![], - CleanupDeclarationsDto { - delete: vec![CleanupTargetDto { - path: path_to_string(&build_dir.join("*")), - kind: CleanupTargetKindDto::Glob, - exclude_basenames: vec![".gitkeep".to_string()], - protection_mode: None, - scope: None, - label: Some("build-cleanup".to_string()), - }], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - // Should delete release and debug, but not .gitkeep - assert!(plan.dirs_to_delete.contains(&path_to_string(&release_dir))); - assert!(plan.dirs_to_delete.contains(&path_to_string(&debug_dir))); - assert!(!plan.dirs_to_delete.contains(&path_to_string(&keep_dir))); - } - - #[test] - fn batched_glob_planner_produces_stable_sorted_output() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let outputs_dir = workspace_dir.join("outputs"); - - // Create directories in non-alphabetical order - let dirs = vec!["zeta", "alpha", "beta", "gamma", "delta"]; - for dir in &dirs { - fs::create_dir_all(outputs_dir.join(dir)).unwrap(); - } - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![], - CleanupDeclarationsDto { - delete: vec![CleanupTargetDto { - path: path_to_string(&outputs_dir.join("*")), - kind: CleanupTargetKindDto::Glob, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - - // Verify output is sorted - let expected_order: Vec = dirs - .iter() - .map(|d| path_to_string(&outputs_dir.join(d))) - .collect::>() - .into_iter() - .collect::>() - .into_iter() - .collect(); - - assert_eq!(plan.dirs_to_delete, expected_order); - - // Run multiple times to ensure stability - for _ in 0..3 { - let plan2 = plan_cleanup(single_plugin_snapshot( - &workspace_dir, - vec![], - CleanupDeclarationsDto { - delete: vec![CleanupTargetDto { - path: path_to_string(&outputs_dir.join("*")), - kind: CleanupTargetKindDto::Glob, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }], - ..CleanupDeclarationsDto::default() - }, - )) - .unwrap(); - assert_eq!(plan.dirs_to_delete, plan2.dirs_to_delete); - } - } - - #[test] - fn batched_glob_planner_handles_file_vs_directory_classification() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let mixed_dir = workspace_dir.join("mixed"); - let file_path = mixed_dir.join("file.txt"); - let dir_path = mixed_dir.join("subdir"); - - fs::create_dir_all(&dir_path).unwrap(); - fs::write(&file_path, "content").unwrap(); - - let snapshot = single_plugin_snapshot( - &workspace_dir, - vec![], - CleanupDeclarationsDto { - delete: vec![CleanupTargetDto { - path: path_to_string(&mixed_dir.join("*")), - kind: CleanupTargetKindDto::Glob, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }], - ..CleanupDeclarationsDto::default() - }, - ); - - let plan = plan_cleanup(snapshot).unwrap(); - - // Files should be in files_to_delete, dirs in dirs_to_delete - assert!(plan.files_to_delete.contains(&path_to_string(&file_path))); - assert!(plan.dirs_to_delete.contains(&path_to_string(&dir_path))); - } - - #[test] - fn batched_glob_planner_handles_cross_plugin_glob_batching() { - let temp_dir = tempdir().unwrap(); - let workspace_dir = temp_dir.path().join("workspace"); - let project_a = workspace_dir.join("project-a/temp"); - let project_b = workspace_dir.join("project-b/temp"); - - fs::create_dir_all(project_a.join("old")).unwrap(); - fs::create_dir_all(project_b.join("cache")).unwrap(); - - // Multi-plugin snapshot to test cross-plugin batching - let snapshot = CleanupSnapshot { - workspace_dir: path_to_string(&workspace_dir), - aindex_dir: Some(path_to_string(&workspace_dir.join("aindex"))), - project_roots: vec![ - path_to_string(&workspace_dir.join("project-a")), - path_to_string(&workspace_dir.join("project-b")), - ], - protected_rules: Vec::new(), - plugin_snapshots: vec![ - PluginCleanupSnapshotDto { - plugin_name: "PluginA".to_string(), - outputs: vec![], - cleanup: CleanupDeclarationsDto { - delete: vec![CleanupTargetDto { - path: path_to_string(&project_a.join("*")), - kind: CleanupTargetKindDto::Glob, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }], - ..CleanupDeclarationsDto::default() - }, - }, - PluginCleanupSnapshotDto { - plugin_name: "PluginB".to_string(), - outputs: vec![], - cleanup: CleanupDeclarationsDto { - delete: vec![CleanupTargetDto { - path: path_to_string(&project_b.join("*")), - kind: CleanupTargetKindDto::Glob, - exclude_basenames: Vec::new(), - protection_mode: None, - scope: None, - label: None, - }], - ..CleanupDeclarationsDto::default() - }, - }, - ], - empty_dir_exclude_globs: Vec::new(), - }; - - let plan = plan_cleanup(snapshot).unwrap(); - - // Both plugins' globs should be resolved - assert_eq!(plan.dirs_to_delete.len(), 2); - assert!(plan - .dirs_to_delete - .contains(&path_to_string(&project_a.join("old")))); - assert!(plan - .dirs_to_delete - .contains(&path_to_string(&project_b.join("cache")))); - } -} diff --git a/cli/src/core/config/mod.rs b/cli/src/core/config/mod.rs deleted file mode 100644 index 6a1181b8..00000000 --- a/cli/src/core/config/mod.rs +++ /dev/null @@ -1,1513 +0,0 @@ -#![deny(clippy::all)] - -//! Configuration loading, merging, and validation. -//! -//! Reads only `~/.aindex/.tnmsc.json` (global), -//! then merges with defaults. - -pub mod series_filter; - -use std::collections::HashMap; -use std::env; -use std::fs; -use std::path::{Path, PathBuf}; - -use serde::{Deserialize, Serialize}; -use serde_json::Value; - -use crate::diagnostic_helpers::{diagnostic, line, optional_details}; -use tnmsc_logger::{Logger, create_logger}; - -// --------------------------------------------------------------------------- -// Constants -// --------------------------------------------------------------------------- - -pub const DEFAULT_CONFIG_FILE_NAME: &str = ".tnmsc.json"; -pub const DEFAULT_GLOBAL_CONFIG_DIR: &str = ".aindex"; -pub const DEFAULT_WSL_WINDOWS_USERS_ROOT: &str = "/mnt/c/Users"; - -fn path_details(path: &Path) -> Option> { - optional_details(serde_json::json!({ - "path": path.to_string_lossy() - })) -} - -fn path_error_details(path: &Path, error: &str) -> Option> { - optional_details(serde_json::json!({ - "path": path.to_string_lossy(), - "error": error - })) -} - -// --------------------------------------------------------------------------- -// Types — mirrors TS ConfigTypes.schema.ts -// --------------------------------------------------------------------------- - -/// A source/dist path pair. Both paths are relative to the aindex project root. -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] -pub struct DirPair { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub src: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub dist: Option, -} - -impl DirPair { - fn merge(a: &Option, b: &Option) -> Option { - match (a, b) { - (None, None) => None, - (Some(v), None) => Some(v.clone()), - (None, Some(v)) => Some(v.clone()), - (Some(base), Some(over)) => Some(DirPair { - src: over.src.clone().or_else(|| base.src.clone()), - dist: over.dist.clone().or_else(|| base.dist.clone()), - }), - } - } -} - -/// Aindex configuration. -/// All paths are relative to `/`. -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct AindexConfig { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub dir: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub skills: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub commands: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub sub_agents: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub rules: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub global_prompt: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub workspace_prompt: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub app: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ext: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub arch: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub softwares: Option, -} - -/// Per-plugin fast command series override options. -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct FastCommandSeriesPluginOverride { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub include_series_prefix: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub series_separator: Option, -} - -/// Fast command series configuration options. -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct FastCommandSeriesOptions { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub include_series_prefix: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub plugin_overrides: Option>, -} - -/// User profile information. Supports arbitrary key-value pairs. -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] -pub struct UserProfile { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub name: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub username: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub gender: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub birthday: Option, - #[serde(flatten)] - pub extra: HashMap, -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -#[serde(untagged)] -pub enum StringOrStrings { - Single(String), - Multiple(Vec), -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct WindowsWsl2Options { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub instances: Option, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct WindowsOptions { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub wsl2: Option, -} - -/// User configuration file (.tnmsc.json). -/// All fields are optional — missing fields use default values. -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct UserConfigFile { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub version: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub workspace_dir: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub aindex: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub log_level: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub fast_command_series_options: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub profile: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub windows: Option, -} - -// --------------------------------------------------------------------------- -// Result types -// --------------------------------------------------------------------------- - -/// Result of loading a single config file. -#[derive(Debug, Clone)] -pub struct ConfigLoadResult { - pub config: UserConfigFile, - pub source: Option, - pub found: bool, -} - -/// Result of loading and merging all configurations. -#[derive(Debug, Clone)] -pub struct MergedConfigResult { - pub config: UserConfigFile, - pub sources: Vec, - pub found: bool, -} - -/// Validation result for global config. -#[derive(Debug, Clone)] -pub struct GlobalConfigValidationResult { - pub valid: bool, - pub exists: bool, - pub errors: Vec, - pub should_exit: bool, -} - -// --------------------------------------------------------------------------- -// Path helpers -// --------------------------------------------------------------------------- - -#[derive(Debug, Clone, Default)] -pub struct RuntimeEnvironmentContext { - pub is_wsl: bool, - pub native_home_dir: Option, - pub effective_home_dir: Option, - pub selected_global_config_path: Option, - pub windows_users_root: PathBuf, -} - -fn home_dir() -> Option { - dirs::home_dir() -} - -fn normalize_posix_like_path(raw_path: &str) -> String { - let replaced = raw_path.replace('\\', "/"); - let has_root = replaced.starts_with('/'); - let mut components: Vec<&str> = Vec::new(); - - for component in replaced.split('/') { - if component.is_empty() || component == "." { - continue; - } - - if component == ".." { - if let Some(last_component) = components.last() - && *last_component != ".." - { - components.pop(); - continue; - } - - if !has_root { - components.push(component); - } - continue; - } - - components.push(component); - } - - let joined = components.join("/"); - if has_root { - if joined.is_empty() { - "/".to_string() - } else { - format!("/{joined}") - } - } else { - joined - } -} - -fn is_same_or_child_path(candidate_path: &str, parent_path: &str) -> bool { - let normalized_candidate = normalize_posix_like_path(candidate_path); - let normalized_parent = normalize_posix_like_path(parent_path); - - normalized_candidate == normalized_parent - || normalized_candidate.starts_with(&format!("{normalized_parent}/")) -} - -fn convert_windows_path_to_wsl(raw_path: &str) -> Option { - let bytes = raw_path.as_bytes(); - if bytes.len() < 3 - || !bytes[0].is_ascii_alphabetic() - || bytes[1] != b':' - || (bytes[2] != b'\\' && bytes[2] != b'/') - { - return None; - } - - let drive_letter = char::from(bytes[0]).to_ascii_lowercase(); - let relative_path = raw_path[2..] - .trim_start_matches(['\\', '/']) - .replace('\\', "/"); - let base_path = format!("/mnt/{drive_letter}"); - - if relative_path.is_empty() { - Some(PathBuf::from(base_path)) - } else { - Some(Path::new(&base_path).join(relative_path)) - } -} - -fn resolve_wsl_host_home_candidate(users_root: &Path, raw_path: Option<&str>) -> Option { - let raw_path = raw_path?.trim(); - if raw_path.is_empty() { - return None; - } - - let normalized_users_root = normalize_posix_like_path(&users_root.to_string_lossy()); - let candidate_paths = [ - convert_windows_path_to_wsl(raw_path) - .map(|candidate_path| normalize_posix_like_path(&candidate_path.to_string_lossy())), - Some(normalize_posix_like_path(raw_path)), - ]; - - for candidate_path in candidate_paths.into_iter().flatten() { - if is_same_or_child_path(&candidate_path, &normalized_users_root) { - return Some(PathBuf::from(candidate_path)); - } - } - - None -} - -fn resolve_preferred_wsl_host_home_dirs_for( - users_root: &Path, - userprofile: Option<&str>, - homedrive: Option<&str>, - homepath: Option<&str>, - home: Option<&str>, -) -> Vec { - let mut preferred_home_dirs: Vec = Vec::new(); - let combined_home_path = match (homedrive, homepath) { - (Some(drive), Some(home_path)) if !drive.is_empty() && !home_path.is_empty() => { - Some(format!("{drive}{home_path}")) - } - _ => None, - }; - - for candidate in [ - resolve_wsl_host_home_candidate(users_root, userprofile), - resolve_wsl_host_home_candidate(users_root, combined_home_path.as_deref()), - resolve_wsl_host_home_candidate(users_root, home), - ] - .into_iter() - .flatten() - { - if !preferred_home_dirs - .iter() - .any(|existing| existing == &candidate) - { - preferred_home_dirs.push(candidate); - } - } - - preferred_home_dirs -} - -fn non_empty_env_var(name: &str) -> Option { - env::var(name).ok().filter(|value| !value.is_empty()) -} - -fn resolve_preferred_wsl_host_home_dirs_with_root(users_root: &Path) -> Vec { - let userprofile = non_empty_env_var("USERPROFILE"); - let homedrive = non_empty_env_var("HOMEDRIVE"); - let homepath = non_empty_env_var("HOMEPATH"); - let home = non_empty_env_var("HOME"); - - resolve_preferred_wsl_host_home_dirs_for( - users_root, - userprofile.as_deref(), - homedrive.as_deref(), - homepath.as_deref(), - home.as_deref(), - ) -} - -fn global_config_home_dir(candidate_path: &Path) -> Option { - candidate_path - .parent() - .and_then(|parent| parent.parent()) - .map(PathBuf::from) -} - -fn select_wsl_host_global_config_path_for( - users_root: &Path, - userprofile: Option<&str>, - homedrive: Option<&str>, - homepath: Option<&str>, - home: Option<&str>, -) -> Option { - let candidates = find_wsl_host_global_config_paths_with_root(users_root); - let preferred_home_dirs = resolve_preferred_wsl_host_home_dirs_for( - users_root, - userprofile, - homedrive, - homepath, - home, - ); - - if !preferred_home_dirs.is_empty() { - for preferred_home_dir in preferred_home_dirs { - if let Some(candidate_path) = candidates.iter().find(|candidate_path| { - global_config_home_dir(candidate_path).as_ref() == Some(&preferred_home_dir) - }) { - return Some(candidate_path.clone()); - } - } - - return None; - } - - if candidates.len() == 1 { - return candidates.into_iter().next(); - } - - None -} - -fn select_wsl_host_global_config_path_with_root(users_root: &Path) -> Option { - let userprofile = non_empty_env_var("USERPROFILE"); - let homedrive = non_empty_env_var("HOMEDRIVE"); - let homepath = non_empty_env_var("HOMEPATH"); - let home = non_empty_env_var("HOME"); - - select_wsl_host_global_config_path_for( - users_root, - userprofile.as_deref(), - homedrive.as_deref(), - homepath.as_deref(), - home.as_deref(), - ) -} - -fn build_required_wsl_config_resolution_error(users_root: &Path) -> String { - let preferred_home_dirs = resolve_preferred_wsl_host_home_dirs_with_root(users_root); - let candidates = find_wsl_host_global_config_paths_with_root(users_root); - let config_lookup_pattern = format!( - "\"{}/*/{}/{}\"", - users_root.to_string_lossy(), - DEFAULT_GLOBAL_CONFIG_DIR, - DEFAULT_CONFIG_FILE_NAME - ); - - if candidates.is_empty() { - return format!("WSL host config file not found under {config_lookup_pattern}."); - } - - if !preferred_home_dirs.is_empty() { - return format!( - "WSL host config file for the current Windows user was not found under {config_lookup_pattern}." - ); - } - - format!( - "WSL host config file could not be matched to the current Windows user under {config_lookup_pattern}." - ) -} - -fn is_wsl_runtime_for( - os_name: &str, - wsl_distro_name: Option<&str>, - wsl_interop: Option<&str>, - release: &str, -) -> bool { - if os_name != "linux" { - return false; - } - - if wsl_distro_name.is_some_and(|value| !value.is_empty()) - || wsl_interop.is_some_and(|value| !value.is_empty()) - { - return true; - } - - release.to_lowercase().contains("microsoft") -} - -pub fn is_wsl_runtime() -> bool { - let release = fs::read_to_string("/proc/sys/kernel/osrelease").unwrap_or_default(); - let wsl_distro_name = env::var("WSL_DISTRO_NAME").ok(); - let wsl_interop = env::var("WSL_INTEROP").ok(); - - is_wsl_runtime_for( - env::consts::OS, - wsl_distro_name.as_deref(), - wsl_interop.as_deref(), - &release, - ) -} - -pub fn find_wsl_host_global_config_paths_with_root(users_root: &Path) -> Vec { - if !users_root.is_dir() { - return vec![]; - } - - let mut candidates: Vec = match fs::read_dir(users_root) { - Ok(entries) => entries - .filter_map(|entry| entry.ok()) - .filter_map(|entry| { - let entry_path = entry.path(); - if !entry_path.is_dir() { - return None; - } - - let candidate_path = entry_path - .join(DEFAULT_GLOBAL_CONFIG_DIR) - .join(DEFAULT_CONFIG_FILE_NAME); - if candidate_path.is_file() { - Some(candidate_path) - } else { - None - } - }) - .collect(), - Err(_) => vec![], - }; - - candidates.sort_by(|a, b| a.to_string_lossy().cmp(&b.to_string_lossy())); - candidates -} - -pub fn resolve_runtime_environment_with_root(users_root: PathBuf) -> RuntimeEnvironmentContext { - let native_home_dir = home_dir(); - let is_wsl = is_wsl_runtime(); - let selected_global_config_path = if is_wsl { - select_wsl_host_global_config_path_with_root(&users_root) - } else { - None - }; - let effective_home_dir = selected_global_config_path - .as_ref() - .and_then(|config_path| config_path.parent().and_then(|parent| parent.parent())) - .map(PathBuf::from) - .or_else(|| native_home_dir.clone()); - - RuntimeEnvironmentContext { - is_wsl, - native_home_dir, - effective_home_dir, - selected_global_config_path, - windows_users_root: users_root, - } -} - -pub fn resolve_runtime_environment() -> RuntimeEnvironmentContext { - resolve_runtime_environment_with_root(PathBuf::from(DEFAULT_WSL_WINDOWS_USERS_ROOT)) -} - -/// Resolve `~` prefix to the user's home directory. -pub fn resolve_tilde(p: &str) -> PathBuf { - let runtime_environment = resolve_runtime_environment(); - if let Some(rest) = p.strip_prefix('~') - && let Some(home) = runtime_environment - .effective_home_dir - .or(runtime_environment.native_home_dir) - { - let rest = rest - .strip_prefix('/') - .or_else(|| rest.strip_prefix('\\')) - .unwrap_or(rest); - return home.join(rest); - } - PathBuf::from(p) -} - -/// Get the global config file path: `~/.aindex/.tnmsc.json` -pub fn get_global_config_path() -> PathBuf { - let runtime_environment = resolve_runtime_environment(); - - if let Some(selected_path) = runtime_environment.selected_global_config_path { - return selected_path; - } - - match runtime_environment - .effective_home_dir - .or(runtime_environment.native_home_dir) - { - Some(home) => home - .join(DEFAULT_GLOBAL_CONFIG_DIR) - .join(DEFAULT_CONFIG_FILE_NAME), - None => PathBuf::from(DEFAULT_GLOBAL_CONFIG_DIR).join(DEFAULT_CONFIG_FILE_NAME), - } -} - -pub fn get_required_global_config_path() -> Result { - let runtime_environment = resolve_runtime_environment(); - - if runtime_environment.is_wsl && runtime_environment.selected_global_config_path.is_none() { - return Err(build_required_wsl_config_resolution_error( - &runtime_environment.windows_users_root, - )); - } - - Ok(get_global_config_path()) -} - -// --------------------------------------------------------------------------- -// Merge logic -// --------------------------------------------------------------------------- - -fn merge_aindex(a: &Option, b: &Option) -> Option { - match (a, b) { - (None, None) => None, - (Some(v), None) => Some(v.clone()), - (None, Some(v)) => Some(v.clone()), - (Some(base), Some(over)) => Some(AindexConfig { - dir: over.dir.clone().or_else(|| base.dir.clone()), - skills: DirPair::merge(&base.skills, &over.skills), - commands: DirPair::merge(&base.commands, &over.commands), - sub_agents: DirPair::merge(&base.sub_agents, &over.sub_agents), - rules: DirPair::merge(&base.rules, &over.rules), - global_prompt: DirPair::merge(&base.global_prompt, &over.global_prompt), - workspace_prompt: DirPair::merge(&base.workspace_prompt, &over.workspace_prompt), - app: DirPair::merge(&base.app, &over.app), - ext: DirPair::merge(&base.ext, &over.ext), - arch: DirPair::merge(&base.arch, &over.arch), - softwares: DirPair::merge(&base.softwares, &over.softwares), - }), - } -} - -fn merge_windows(a: &Option, b: &Option) -> Option { - match (a, b) { - (None, None) => None, - (Some(v), None) => Some(v.clone()), - (None, Some(v)) => Some(v.clone()), - (Some(base), Some(over)) => Some(WindowsOptions { - wsl2: match (&base.wsl2, &over.wsl2) { - (None, None) => None, - (Some(v), None) => Some(v.clone()), - (None, Some(v)) => Some(v.clone()), - (Some(base_wsl2), Some(over_wsl2)) => Some(WindowsWsl2Options { - instances: over_wsl2 - .instances - .clone() - .or_else(|| base_wsl2.instances.clone()), - }), - }, - }), - } -} - -/// Merge two configs. `over` fields take priority over `base`. -pub fn merge_configs_pair(base: &UserConfigFile, over: &UserConfigFile) -> UserConfigFile { - let merged_aindex = merge_aindex(&base.aindex, &over.aindex); - let merged_windows = merge_windows(&base.windows, &over.windows); - - UserConfigFile { - version: over.version.clone().or_else(|| base.version.clone()), - workspace_dir: over - .workspace_dir - .clone() - .or_else(|| base.workspace_dir.clone()), - aindex: merged_aindex, - log_level: over.log_level.clone().or_else(|| base.log_level.clone()), - fast_command_series_options: over - .fast_command_series_options - .clone() - .or_else(|| base.fast_command_series_options.clone()), - profile: over.profile.clone().or_else(|| base.profile.clone()), - windows: merged_windows, - } -} - -/// Merge a list of configs. First has highest priority, last has lowest. -fn merge_configs(configs: &[UserConfigFile]) -> UserConfigFile { - if configs.is_empty() { - return UserConfigFile::default(); - } - if configs.len() == 1 { - return configs[0].clone(); - } - // Reverse: merge from lowest to highest priority - let mut result = UserConfigFile::default(); - for config in configs.iter().rev() { - result = merge_configs_pair(&result, config); - } - result -} - -// --------------------------------------------------------------------------- -// ConfigLoader -// --------------------------------------------------------------------------- - -/// Options for ConfigLoader. -#[derive(Debug, Clone, Default)] -pub struct ConfigLoaderOptions {} - -/// ConfigLoader handles discovery and loading of user configuration files. -/// -/// The config source is fixed and unambiguous: -/// 1. Global: `~/.aindex/.tnmsc.json` -pub struct ConfigLoader { - logger: Logger, -} - -impl ConfigLoader { - pub fn new(_options: ConfigLoaderOptions) -> Self { - Self { - logger: create_logger("ConfigLoader", None), - } - } - - pub fn with_defaults() -> Self { - Self::new(ConfigLoaderOptions::default()) - } - - pub fn try_get_search_paths(&self, _cwd: &Path) -> Result, String> { - let runtime_environment = resolve_runtime_environment(); - - if runtime_environment.is_wsl { - self.logger.info( - Value::String("wsl environment detected".into()), - Some(serde_json::json!({ - "effectiveHomeDir": runtime_environment - .effective_home_dir - .as_ref() - .map(|path| path.to_string_lossy().into_owned()) - })), - ); - } - - let config_path = get_required_global_config_path()?; - if runtime_environment.is_wsl { - self.logger.info( - Value::String("using wsl host global config".into()), - Some(serde_json::json!({ - "path": config_path.to_string_lossy() - })), - ); - } - - Ok(vec![config_path]) - } - - /// Get the list of config file paths to search. - pub fn get_search_paths(&self, _cwd: &Path) -> Vec { - vec![get_global_config_path()] - } - - /// Load a single config file. - pub fn load_from_file(&self, file_path: &Path) -> ConfigLoadResult { - let resolved = if file_path.starts_with("~") { - resolve_tilde(&file_path.to_string_lossy()) - } else { - file_path.to_path_buf() - }; - - if !resolved.exists() { - return ConfigLoadResult { - config: UserConfigFile::default(), - source: None, - found: false, - }; - } - - match fs::read_to_string(&resolved) { - Ok(content) => match self.parse_config(&content, &resolved) { - Ok(config) => { - self.logger.debug( - Value::String("loaded".into()), - Some(serde_json::json!({"source": resolved.to_string_lossy()})), - ); - ConfigLoadResult { - config, - source: Some(resolved.to_string_lossy().into_owned()), - found: true, - } - } - Err(_) => ConfigLoadResult { - config: UserConfigFile::default(), - source: None, - found: false, - }, - }, - Err(e) => { - self.logger.warn(diagnostic( - "CONFIG_FILE_LOAD_FAILED", - "Config file could not be loaded", - line("The config file exists but could not be read, so it was skipped."), - Some(line( - "Check that the file exists, is readable, and is not locked.", - )), - None, - path_error_details(&resolved, &e.to_string()), - )); - ConfigLoadResult { - config: UserConfigFile::default(), - source: None, - found: false, - } - } - } - } - - pub fn try_load(&self, cwd: &Path) -> Result { - let search_paths = self.try_get_search_paths(cwd)?; - let mut loaded: Vec = Vec::new(); - - for path in &search_paths { - let result = self.load_from_file(path); - if result.found { - loaded.push(result); - } - } - - let configs: Vec = loaded.iter().map(|r| r.config.clone()).collect(); - let merged = merge_configs(&configs); - let sources: Vec = loaded.iter().filter_map(|r| r.source.clone()).collect(); - - Ok(MergedConfigResult { - config: merged, - sources, - found: !loaded.is_empty(), - }) - } - - /// Load and merge all config files. - pub fn load(&self, cwd: &Path) -> MergedConfigResult { - self.try_load(cwd).unwrap_or_else(|error| { - self.logger.error(diagnostic( - "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", - "Failed to resolve the global config path", - line("The runtime could not determine which global config file should be loaded."), - Some(line( - "Ensure the expected global config exists and retry the command.", - )), - None, - optional_details(serde_json::json!({ "error": error })), - )); - - MergedConfigResult { - config: UserConfigFile::default(), - sources: vec![], - found: false, - } - }) - } - - fn parse_config(&self, content: &str, file_path: &Path) -> Result { - let parsed: Value = serde_json::from_str(content) - .map_err(|e| format!("Invalid JSON in {}: {}", file_path.display(), e))?; - - if !parsed.is_object() { - return Err(format!( - "Config must be a JSON object in {}", - file_path.display() - )); - } - - // Deserialize with serde — invalid fields are silently ignored (like Zod's safeParse) - match serde_json::from_value::(parsed.clone()) { - Ok(config) => Ok(config), - Err(e) => { - self.logger.warn(diagnostic( - "CONFIG_FILE_VALIDATION_WARNING", - "Config contains invalid fields", - line("One or more config fields could not be deserialized, so defaults were used."), - Some(line("Fix the field types in the config file and retry.")), - None, - path_error_details(file_path, &e.to_string()), - )); - // Fallback: try to extract what we can - Ok( - serde_json::from_value::(Value::Object(Default::default())) - .unwrap_or_default(), - ) - } - } - } -} - -// --------------------------------------------------------------------------- -// Convenience functions -// --------------------------------------------------------------------------- - -/// Load user configuration using default loader. -pub fn load_user_config(cwd: &Path) -> Result { - ConfigLoader::with_defaults().try_load(cwd) -} - -// --------------------------------------------------------------------------- -// Config file management -// --------------------------------------------------------------------------- - -/// Write a config file with pretty JSON formatting. -pub fn write_config(path: &Path, config: &UserConfigFile, logger: &Logger) { - if let Some(parent) = path.parent() - && !parent.exists() - { - let _ = fs::create_dir_all(parent); - } - - match serde_json::to_string_pretty(config) { - Ok(json) => { - let content = format!("{}\n", json); - match fs::write(path, content) { - Ok(()) => { - logger.info( - Value::String("global config created".into()), - Some(serde_json::json!({"path": path.to_string_lossy()})), - ); - } - Err(e) => { - logger.warn(diagnostic( - "CONFIG_WRITE_FAILED", - "Failed to write the config file", - line("The CLI generated config JSON but could not write it to disk."), - Some(line( - "Check that the destination directory is writable and retry.", - )), - None, - path_error_details(path, &e.to_string()), - )); - } - } - } - Err(e) => { - logger.warn(diagnostic( - "CONFIG_SERIALIZATION_FAILED", - "Failed to serialize the config file", - line("The config object could not be converted to JSON."), - None, - None, - optional_details(serde_json::json!({ "error": e.to_string() })), - )); - } - } -} - -/// Validate global config file strictly. -/// -/// - If config doesn't exist: create default config, log warn, continue -/// - If config is invalid: preserve the file, log error, return should_exit=true -pub fn validate_and_ensure_global_config( - default_config: &UserConfigFile, -) -> GlobalConfigValidationResult { - let logger = create_logger("ConfigLoader", None); - let config_path = match get_required_global_config_path() { - Ok(path) => path, - Err(error) => { - logger.error(diagnostic( - "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", - "Failed to resolve the global config path", - line("The runtime could not determine the expected global config file location."), - Some(line( - "Ensure the required host config exists before retrying tnmsc.", - )), - None, - optional_details(serde_json::json!({ "error": error })), - )); - return GlobalConfigValidationResult { - valid: false, - exists: false, - errors: vec![error], - should_exit: true, - }; - } - }; - - if !config_path.exists() { - logger.warn(diagnostic( - "GLOBAL_CONFIG_MISSING_DEFAULT_CREATED", - "Global config was missing", - line("No global config file exists at the expected path, so a default file will be created."), - Some(line("Review the generated config if you need custom settings.")), - None, - path_details(&config_path), - )); - write_config(&config_path, default_config, &logger); - return GlobalConfigValidationResult { - valid: true, - exists: false, - errors: vec![], - should_exit: false, - }; - } - - // Try to read - let content = match fs::read_to_string(&config_path) { - Ok(c) => c, - Err(e) => { - let msg = format!("Failed to read config: {}", e); - logger.error(diagnostic( - "GLOBAL_CONFIG_READ_FAILED", - "Failed to read the global config", - line("The global config file exists but could not be read."), - Some(line( - "Check file permissions and confirm the path points to a readable file.", - )), - None, - path_error_details(&config_path, &e.to_string()), - )); - return preserve_invalid_config_and_exit(&config_path, &logger, vec![msg]); - } - }; - - // Try to parse JSON - let parsed: Value = match serde_json::from_str(&content) { - Ok(v) => v, - Err(e) => { - let msg = format!("Invalid JSON: {}", e); - logger.error(diagnostic( - "GLOBAL_CONFIG_INVALID_JSON", - "Global config contains invalid JSON", - line("The global config file is not valid JSON."), - Some(line("Fix the JSON syntax in the config file and retry.")), - None, - path_error_details(&config_path, &e.to_string()), - )); - return preserve_invalid_config_and_exit(&config_path, &logger, vec![msg]); - } - }; - - // Must be an object - if !parsed.is_object() { - logger.error(diagnostic( - "GLOBAL_CONFIG_NOT_OBJECT", - "Global config must be a JSON object", - line( - "The global config parsed successfully, but its top-level value is not an object.", - ), - Some(line( - "Replace the top-level JSON value with an object like `{}` and retry.", - )), - None, - path_details(&config_path), - )); - return preserve_invalid_config_and_exit( - &config_path, - &logger, - vec!["Config must be a JSON object".into()], - ); - } - - // Try to deserialize - if let Err(e) = serde_json::from_value::(parsed) { - let msg = format!("Config validation error: {}", e); - logger.error(diagnostic( - "GLOBAL_CONFIG_VALIDATION_FAILED", - "Global config failed schema validation", - line("The JSON shape does not match the expected config schema."), - Some(line( - "Fix the invalid field types or names in the config file and retry.", - )), - None, - path_error_details(&config_path, &e.to_string()), - )); - return preserve_invalid_config_and_exit(&config_path, &logger, vec![msg]); - } - - GlobalConfigValidationResult { - valid: true, - exists: true, - errors: vec![], - should_exit: false, - } -} - -fn preserve_invalid_config_and_exit( - config_path: &Path, - logger: &Logger, - errors: Vec, -) -> GlobalConfigValidationResult { - logger.error(diagnostic( - "GLOBAL_CONFIG_PRESERVED", - "Invalid global config was preserved", - line("The CLI stopped rather than overwriting the invalid global config."), - Some(line( - "Fix the file at the reported path and restart the command.", - )), - None, - path_details(config_path), - )); - - GlobalConfigValidationResult { - valid: false, - exists: true, - errors, - should_exit: true, - } -} - -// --------------------------------------------------------------------------- -// Tests -// --------------------------------------------------------------------------- - -#[cfg(test)] -mod tests { - use super::*; - use tempfile::TempDir; - - #[test] - fn test_resolve_tilde() { - let resolved = resolve_tilde("~/test/path"); - if let Some(home) = home_dir() { - assert_eq!(resolved, home.join("test").join("path")); - } - } - - #[test] - fn test_resolve_tilde_no_tilde() { - let resolved = resolve_tilde("/absolute/path"); - assert_eq!(resolved, PathBuf::from("/absolute/path")); - } - - #[test] - fn test_user_config_file_default() { - let config = UserConfigFile::default(); - assert!(config.version.is_none()); - assert!(config.workspace_dir.is_none()); - assert!(config.aindex.is_none()); - assert!(config.log_level.is_none()); - } - - #[test] - fn test_user_config_file_deserialize() { - let json = r#"{ - "workspaceDir": "~/myworkspace", - "logLevel": "debug" - }"#; - let config: UserConfigFile = serde_json::from_str(json).unwrap(); - assert_eq!(config.workspace_dir.as_deref(), Some("~/myworkspace")); - assert_eq!(config.log_level.as_deref(), Some("debug")); - } - - #[test] - fn test_user_config_file_deserialize_with_aindex() { - let json = r#"{ - "aindex": { - "skills": {"src": "src/skills", "dist": "dist/skills"}, - "commands": {"src": "src/commands", "dist": "dist/commands"}, - "subAgents": {"src": "src/agents", "dist": "dist/agents"}, - "rules": {"src": "src/rules", "dist": "dist/rules"}, - "globalPrompt": {"src": "global.src.mdx", "dist": "dist/global.mdx"}, - "workspacePrompt": {"src": "workspace.src.mdx", "dist": "dist/workspace.mdx"}, - "app": {"src": "app", "dist": "dist/app"}, - "ext": {"src": "ext", "dist": "dist/ext"}, - "arch": {"src": "arch", "dist": "dist/arch"}, - "softwares": {"src": "softwares", "dist": "dist/softwares"} - } - }"#; - let config: UserConfigFile = serde_json::from_str(json).unwrap(); - let aindex = config.aindex.unwrap(); - assert_eq!( - aindex.skills.as_ref().unwrap().src.as_deref(), - Some("src/skills") - ); - assert_eq!( - aindex.commands.as_ref().unwrap().src.as_deref(), - Some("src/commands") - ); - } - - #[test] - fn test_user_config_file_deserialize_with_profile() { - let json = r#"{ - "profile": { - "name": "Zhang San", - "username": "zhangsan", - "gender": "male", - "birthday": "1990-01-01", - "customField": "custom value" - } - }"#; - let config: UserConfigFile = serde_json::from_str(json).unwrap(); - let profile = config.profile.unwrap(); - assert_eq!(profile.name.as_deref(), Some("Zhang San")); - assert_eq!( - profile.extra.get("customField").and_then(|v| v.as_str()), - Some("custom value") - ); - } - - #[test] - fn test_user_config_file_deserialize_with_windows_wsl2_instances() { - let json = r#"{ - "windows": { - "wsl2": { - "instances": ["Ubuntu", "Debian"] - } - } - }"#; - let config: UserConfigFile = serde_json::from_str(json).unwrap(); - - match config - .windows - .and_then(|windows| windows.wsl2) - .and_then(|wsl2| wsl2.instances) - { - Some(StringOrStrings::Multiple(instances)) => { - assert_eq!(instances, vec!["Ubuntu".to_string(), "Debian".to_string()]); - } - other => panic!("expected windows.wsl2.instances array, got {:?}", other), - } - } - - #[test] - fn test_user_config_file_roundtrip() { - let config = UserConfigFile { - workspace_dir: Some("~/workspace".into()), - log_level: Some("info".into()), - ..Default::default() - }; - let json = serde_json::to_string(&config).unwrap(); - let parsed: UserConfigFile = serde_json::from_str(&json).unwrap(); - assert_eq!(config, parsed); - } - - #[test] - fn test_merge_configs_empty() { - let result = merge_configs(&[]); - assert_eq!(result, UserConfigFile::default()); - } - - #[test] - fn test_merge_configs_single() { - let config = UserConfigFile { - workspace_dir: Some("~/ws".into()), - ..Default::default() - }; - let result = merge_configs(std::slice::from_ref(&config)); - assert_eq!(result, config); - } - - #[test] - fn test_merge_configs_priority() { - let cwd_config = UserConfigFile { - workspace_dir: Some("~/cwd-workspace".into()), - log_level: Some("debug".into()), - ..Default::default() - }; - let global_config = UserConfigFile { - workspace_dir: Some("~/global-workspace".into()), - log_level: Some("info".into()), - aindex: Some(AindexConfig { - skills: Some(DirPair { - src: Some("global/skills".into()), - dist: Some("global/dist/skills".into()), - }), - ..Default::default() - }), - ..Default::default() - }; - - // cwd_config is first (highest priority) - let result = merge_configs(&[cwd_config, global_config]); - assert_eq!(result.workspace_dir.as_deref(), Some("~/cwd-workspace")); - assert_eq!(result.log_level.as_deref(), Some("debug")); - assert_eq!( - result - .aindex - .as_ref() - .and_then(|s| s.skills.as_ref()) - .and_then(|p| p.src.as_deref()), - Some("global/skills") - ); - } - - #[test] - fn test_merge_configs_merges_windows_options() { - let base_config = UserConfigFile { - windows: Some(WindowsOptions { - wsl2: Some(WindowsWsl2Options { - instances: Some(StringOrStrings::Single("Ubuntu".into())), - }), - }), - ..Default::default() - }; - let override_config = UserConfigFile { - log_level: Some("debug".into()), - ..Default::default() - }; - - let merged = merge_configs_pair(&base_config, &override_config); - match merged - .windows - .and_then(|windows| windows.wsl2) - .and_then(|wsl2| wsl2.instances) - { - Some(StringOrStrings::Single(instance)) => assert_eq!(instance, "Ubuntu"), - other => panic!( - "expected merged windows.wsl2.instances value, got {:?}", - other - ), - } - } - - #[test] - fn test_merge_aindex_deep() { - let cwd_config = UserConfigFile { - aindex: Some(AindexConfig { - skills: Some(DirPair { - src: Some("custom/skills".into()), - dist: Some("custom/dist/skills".into()), - }), - ..Default::default() - }), - ..Default::default() - }; - let global_config = UserConfigFile { - aindex: Some(AindexConfig { - skills: Some(DirPair { - src: Some("src/skills".into()), - dist: Some("dist/skills".into()), - }), - commands: Some(DirPair { - src: Some("src/commands".into()), - dist: Some("dist/commands".into()), - }), - ..Default::default() - }), - ..Default::default() - }; - - let result = merge_configs(&[cwd_config, global_config]); - let aindex = result.aindex.unwrap(); - assert_eq!( - aindex.skills.as_ref().unwrap().src.as_deref(), - Some("custom/skills") - ); - assert_eq!( - aindex.commands.as_ref().unwrap().src.as_deref(), - Some("src/commands") - ); - } - - #[test] - fn test_config_loader_search_paths() { - let loader = ConfigLoader::with_defaults(); - let cwd = PathBuf::from("/workspace/project"); - let paths = loader.get_search_paths(&cwd); - - assert_eq!(paths, vec![get_global_config_path()]); - } - - #[test] - fn test_find_wsl_host_global_config_paths_with_root_sorts_candidates() { - let temp_dir = TempDir::new().unwrap(); - let users_root = temp_dir.path().join("Users"); - let alpha_config_path = users_root.join("alpha").join(".aindex").join(".tnmsc.json"); - let bravo_config_path = users_root.join("bravo").join(".aindex").join(".tnmsc.json"); - - fs::create_dir_all(alpha_config_path.parent().unwrap()).unwrap(); - fs::create_dir_all(bravo_config_path.parent().unwrap()).unwrap(); - fs::write(&alpha_config_path, "{}\n").unwrap(); - fs::write(&bravo_config_path, "{}\n").unwrap(); - - let candidates = find_wsl_host_global_config_paths_with_root(&users_root); - assert_eq!(candidates, vec![alpha_config_path, bravo_config_path]); - } - - #[test] - fn test_select_wsl_host_global_config_path_for_prefers_matching_userprofile() { - let temp_dir = TempDir::new().unwrap(); - let users_root = temp_dir.path().join("Users"); - let alpha_config_path = users_root.join("alpha").join(".aindex").join(".tnmsc.json"); - let bravo_config_path = users_root.join("bravo").join(".aindex").join(".tnmsc.json"); - - fs::create_dir_all(alpha_config_path.parent().unwrap()).unwrap(); - fs::create_dir_all(bravo_config_path.parent().unwrap()).unwrap(); - fs::write(&alpha_config_path, "{}\n").unwrap(); - fs::write(&bravo_config_path, "{}\n").unwrap(); - - let selected = select_wsl_host_global_config_path_for( - &users_root, - Some(&users_root.join("bravo").to_string_lossy()), - None, - None, - None, - ); - - assert_eq!(selected, Some(bravo_config_path)); - } - - #[test] - fn test_select_wsl_host_global_config_path_for_rejects_other_windows_profile() { - let temp_dir = TempDir::new().unwrap(); - let users_root = temp_dir.path().join("Users"); - let alpha_config_path = users_root.join("alpha").join(".aindex").join(".tnmsc.json"); - - fs::create_dir_all(alpha_config_path.parent().unwrap()).unwrap(); - fs::write(&alpha_config_path, "{}\n").unwrap(); - - let selected = select_wsl_host_global_config_path_for( - &users_root, - Some(&users_root.join("bravo").to_string_lossy()), - None, - None, - None, - ); - - assert_eq!(selected, None); - } - - #[test] - fn test_is_wsl_runtime_for_detects_linux_wsl_inputs() { - assert!(is_wsl_runtime_for("linux", Some("Ubuntu"), None, "")); - assert!(is_wsl_runtime_for( - "linux", - None, - Some("/run/WSL/12_interop"), - "" - )); - assert!(is_wsl_runtime_for( - "linux", - None, - None, - "5.15.167.4-microsoft-standard-WSL2" - )); - assert!(!is_wsl_runtime_for("windows", Some("Ubuntu"), None, "")); - } - - #[test] - fn test_config_loader_load_nonexistent() { - let loader = ConfigLoader::with_defaults(); - let result = loader.load_from_file(Path::new("/nonexistent/.tnmsc.json")); - assert!(!result.found); - assert!(result.source.is_none()); - } - - #[test] - fn test_dir_pair_merge() { - let a = Some(DirPair { - src: Some("a-src".into()), - dist: Some("a-dist".into()), - }); - let b = Some(DirPair { - src: Some("b-src".into()), - dist: None, - }); - let merged = DirPair::merge(&a, &b).unwrap(); - assert_eq!(merged.src.as_deref(), Some("b-src")); - assert_eq!(merged.dist.as_deref(), Some("a-dist")); - } - - #[test] - fn test_global_config_path() { - let path = get_global_config_path(); - let path_str = path.to_string_lossy(); - assert!(path_str.contains(DEFAULT_GLOBAL_CONFIG_DIR)); - assert!(path_str.contains(DEFAULT_CONFIG_FILE_NAME)); - } - - #[test] - fn test_preserve_invalid_config_and_exit_keeps_original_file() { - let temp_dir = match TempDir::new() { - Ok(value) => value, - Err(error) => panic!("failed to create temp dir: {error}"), - }; - let config_path = temp_dir.path().join(DEFAULT_CONFIG_FILE_NAME); - let invalid_content = "{invalid-json"; - - if let Err(error) = fs::write(&config_path, invalid_content) { - panic!("failed to write invalid config fixture: {error}"); - } - - let logger = create_logger("ConfigLoaderTest", None); - let result = - preserve_invalid_config_and_exit(&config_path, &logger, vec!["Invalid JSON".into()]); - - assert!(!result.valid); - assert!(result.exists); - assert!(result.should_exit); - assert_eq!(result.errors, vec!["Invalid JSON".to_string()]); - - let retained = match fs::read_to_string(&config_path) { - Ok(value) => value, - Err(error) => panic!("failed to read retained config: {error}"), - }; - assert_eq!(retained, invalid_content); - } -} - -// =========================================================================== -// NAPI binding layer (only compiled with --features napi) -// =========================================================================== - -#[cfg(feature = "napi")] -mod napi_binding { - use super::*; - use napi_derive::napi; - - /// Load and merge user configuration from the given cwd directory. - /// Returns the merged config as a JSON string. - #[napi] - pub fn load_user_config(cwd: String) -> napi::Result { - let path = std::path::Path::new(&cwd); - let result = super::load_user_config(path).map_err(napi::Error::from_reason)?; - serde_json::to_string(&result.config).map_err(|e| napi::Error::from_reason(e.to_string())) - } - - /// Get the global config file path (~/.aindex/.tnmsc.json). - #[napi] - pub fn get_global_config_path_str() -> napi::Result { - get_required_global_config_path() - .map(|path| path.to_string_lossy().into_owned()) - .map_err(napi::Error::from_reason) - } - - /// Merge two config JSON strings. `over` fields take priority over `base`. - #[napi] - pub fn merge_configs(base_json: String, over_json: String) -> napi::Result { - let base: UserConfigFile = serde_json::from_str(&base_json) - .map_err(|e| napi::Error::from_reason(format!("base: {e}")))?; - let over: UserConfigFile = serde_json::from_str(&over_json) - .map_err(|e| napi::Error::from_reason(format!("over: {e}")))?; - let merged = merge_configs_pair(&base, &over); - serde_json::to_string(&merged).map_err(|e| napi::Error::from_reason(e.to_string())) - } - - /// Load config from a specific file path. Returns JSON string or null if not found. - #[napi] - pub fn load_config_from_file(file_path: String) -> napi::Result> { - let loader = ConfigLoader::with_defaults(); - let result = loader.load_from_file(std::path::Path::new(&file_path)); - if !result.found { - return Ok(None); - } - let json = serde_json::to_string(&result.config) - .map_err(|e| napi::Error::from_reason(e.to_string()))?; - Ok(Some(json)) - } -} diff --git a/cli/src/core/config/series_filter.rs b/cli/src/core/config/series_filter.rs deleted file mode 100644 index 0a7765ee..00000000 --- a/cli/src/core/config/series_filter.rs +++ /dev/null @@ -1,228 +0,0 @@ -//! Series-based filtering helpers (NAPI-exported). -//! -//! Mirrors the pure-TS implementations in `seriesFilter.ts`. -//! Each function is gated behind the `napi` feature so the crate -//! still compiles as a plain Rust library without Node bindings. - -use std::collections::{HashMap, HashSet}; - -// --------------------------------------------------------------------------- -// Core logic (always available) -// --------------------------------------------------------------------------- - -/// Compute the effective includeSeries as the set union of two optional arrays. -/// Returns an empty vec when both are `None` (no filtering — all items pass). -pub fn resolve_effective_include_series_core( - top_level: Option<&[String]>, - type_specific: Option<&[String]>, -) -> Vec { - match (top_level, type_specific) { - (None, None) => Vec::new(), - (Some(a), None) => a - .iter() - .collect::>() - .into_iter() - .cloned() - .collect(), - (None, Some(b)) => b - .iter() - .collect::>() - .into_iter() - .cloned() - .collect(), - (Some(a), Some(b)) => { - let mut set = HashSet::new(); - for s in a.iter().chain(b.iter()) { - set.insert(s.clone()); - } - set.into_iter().collect() - } - } -} - -/// Determine whether a prompt item should be included. -/// -/// - `None` seri_name → always included -/// - empty effective list → always included (no filtering configured) -/// - single string → included iff member of the list -/// - array → included iff any element intersects the list -pub fn matches_series_core( - seri_name: Option<&SeriName>, - effective_include_series: &[String], -) -> bool { - let seri = match seri_name { - None => return true, - Some(s) => s, - }; - if effective_include_series.is_empty() { - return true; - } - let set: HashSet<&str> = effective_include_series - .iter() - .map(String::as_str) - .collect(); - match seri { - SeriName::Single(s) => set.contains(s.as_str()), - SeriName::Multiple(arr) => arr.iter().any(|s| set.contains(s.as_str())), - } -} - -/// Deep-merge two optional subSeries records. -/// For each key present in either record the result is the set union of both -/// value arrays. Returns an empty map when both are `None`. -pub fn resolve_sub_series_core( - top_level: Option<&HashMap>>, - type_specific: Option<&HashMap>>, -) -> HashMap> { - match (top_level, type_specific) { - (None, None) => HashMap::new(), - (Some(a), None) => a.clone(), - (None, Some(b)) => b.clone(), - (Some(a), Some(b)) => { - let mut merged = a.clone(); - for (key, values) in b { - let entry = merged.entry(key.clone()).or_default(); - let mut set: HashSet = entry.drain(..).collect(); - for v in values { - set.insert(v.clone()); - } - *entry = set.into_iter().collect(); - } - merged - } - } -} - -/// Wrapper enum for the `seriName` parameter (string or string array). -pub enum SeriName { - Single(String), - Multiple(Vec), -} - -// --------------------------------------------------------------------------- -// NAPI binding layer -// --------------------------------------------------------------------------- - -#[cfg(feature = "napi")] -mod napi_binding { - use std::collections::HashMap; - - use napi::Either; - use napi_derive::napi; - - use super::*; - - /// Determine whether a prompt item should be included based on its - /// `seriName` and the effective `includeSeries` list. - #[napi] - pub fn matches_series( - seri_name: Option>>, - effective_include_series: Vec, - ) -> bool { - let seri = seri_name.map(|e| match e { - Either::A(s) => SeriName::Single(s), - Either::B(arr) => SeriName::Multiple(arr), - }); - matches_series_core(seri.as_ref(), &effective_include_series) - } - - /// Compute the effective includeSeries as the set union of top-level and - /// type-specific arrays. - #[napi] - pub fn resolve_effective_include_series( - top_level: Option>, - type_specific: Option>, - ) -> Vec { - resolve_effective_include_series_core(top_level.as_deref(), type_specific.as_deref()) - } - - /// Deep-merge two optional subSeries records. - #[napi] - pub fn resolve_sub_series( - top_level: Option>>, - type_specific: Option>>, - ) -> HashMap> { - resolve_sub_series_core(top_level.as_ref(), type_specific.as_ref()) - } -} - -// --------------------------------------------------------------------------- -// Tests -// --------------------------------------------------------------------------- - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_resolve_effective_both_none() { - let result = resolve_effective_include_series_core(None, None); - assert!(result.is_empty()); - } - - #[test] - fn test_resolve_effective_union() { - let a = vec!["x".into(), "y".into()]; - let b = vec!["y".into(), "z".into()]; - let mut result = resolve_effective_include_series_core(Some(&a), Some(&b)); - result.sort(); - assert_eq!(result, vec!["x", "y", "z"]); - } - - #[test] - fn test_matches_series_none_seri() { - assert!(matches_series_core(None, &["a".into()])); - } - - #[test] - fn test_matches_series_empty_list() { - let seri = SeriName::Single("a".into()); - assert!(matches_series_core(Some(&seri), &[])); - } - - #[test] - fn test_matches_series_string_hit() { - let seri = SeriName::Single("a".into()); - assert!(matches_series_core(Some(&seri), &["a".into(), "b".into()])); - } - - #[test] - fn test_matches_series_string_miss() { - let seri = SeriName::Single("c".into()); - assert!(!matches_series_core(Some(&seri), &["a".into(), "b".into()])); - } - - #[test] - fn test_matches_series_array_intersection() { - let seri = SeriName::Multiple(vec!["c".into(), "a".into()]); - assert!(matches_series_core(Some(&seri), &["a".into(), "b".into()])); - } - - #[test] - fn test_matches_series_array_no_intersection() { - let seri = SeriName::Multiple(vec!["c".into(), "d".into()]); - assert!(!matches_series_core(Some(&seri), &["a".into(), "b".into()])); - } - - #[test] - fn test_resolve_sub_series_both_none() { - let result = resolve_sub_series_core(None, None); - assert!(result.is_empty()); - } - - #[test] - fn test_resolve_sub_series_merge() { - let mut a = HashMap::new(); - a.insert("k".into(), vec!["v1".into()]); - let mut b = HashMap::new(); - b.insert("k".into(), vec!["v1".into(), "v2".into()]); - b.insert("k2".into(), vec!["v3".into()]); - - let result = resolve_sub_series_core(Some(&a), Some(&b)); - assert_eq!(result.len(), 2); - let mut k_vals = result["k"].clone(); - k_vals.sort(); - assert_eq!(k_vals, vec!["v1", "v2"]); - assert_eq!(result["k2"], vec!["v3"]); - } -} diff --git a/cli/src/core/desk-paths.ts b/cli/src/core/desk-paths.ts deleted file mode 100644 index b66f9505..00000000 --- a/cli/src/core/desk-paths.ts +++ /dev/null @@ -1,179 +0,0 @@ -import type {Buffer} from 'node:buffer' -import {buildFileOperationDiagnostic} from '@/diagnostics' -import {getNativeBinding} from './native-binding' - -export interface DeletionError { - readonly path: string - readonly error: unknown -} - -export interface DeletionResult { - readonly deleted: number - readonly deletedPaths: readonly string[] - readonly errors: readonly DeletionError[] -} - -export interface DeleteTargetsResult { - readonly deletedFiles: readonly string[] - readonly deletedDirs: readonly string[] - readonly fileErrors: readonly DeletionError[] - readonly dirErrors: readonly DeletionError[] -} - -export interface WriteLogger { - readonly trace: (data: object) => void - readonly error: (diagnostic: object) => void -} - -export interface SafeWriteOptions { - readonly fullPath: string - readonly content: string | Buffer - readonly type: string - readonly relativePath: string - readonly dryRun: boolean - readonly logger: WriteLogger -} - -export interface SafeWriteResult { - readonly path: string - readonly success: boolean - readonly skipped?: boolean - readonly error?: Error -} - -interface NativeDeskPathsBinding { - readonly getPlatformFixedDir?: () => string - readonly ensureDir?: (dir: string) => void - readonly existsSync?: (targetPath: string) => boolean - readonly deletePathSync?: (targetPath: string) => void - readonly writeFileSync?: (filePath: string, data: string | Buffer, encoding?: BufferEncoding) => void - readonly readFileSync?: (filePath: string, encoding?: BufferEncoding) => string - readonly deleteFiles?: (files: readonly string[]) => DeletionResult | Promise - readonly deleteDirectories?: (dirs: readonly string[]) => DeletionResult | Promise - readonly deleteEmptyDirectories?: (dirs: readonly string[]) => DeletionResult | Promise - readonly deleteTargets?: (targets: {readonly files?: readonly string[], readonly dirs?: readonly string[]}) => DeleteTargetsResult | Promise -} - -type NativeDeletionResult = DeletionResult & { - readonly deleted_paths?: readonly string[] -} - -type NativeDeleteTargetsResult = DeleteTargetsResult & { - readonly deleted_files?: readonly string[] - readonly deleted_dirs?: readonly string[] - readonly file_errors?: readonly DeletionError[] - readonly dir_errors?: readonly DeletionError[] -} - -function requireNativeDeskPathsBinding(): NativeDeskPathsBinding { - const binding = getNativeBinding() - if (binding == null) { - throw new Error('Native desk-paths binding is required. Build or install the Rust NAPI package before running tnmsc.') - } - return binding -} - -function requireDeskPathsMethod( - methodName: K -): NonNullable { - const binding = requireNativeDeskPathsBinding() - const method = binding[methodName] - if (method == null) { - throw new Error(`Native desk-paths binding is missing "${String(methodName)}". Rebuild the Rust NAPI package before running tnmsc.`) - } - return method -} - -function normalizeDeletionResult(result: NativeDeletionResult): DeletionResult { - return { - deleted: result.deleted, - deletedPaths: result.deletedPaths ?? result.deleted_paths ?? [], - errors: result.errors ?? [] - } -} - -function normalizeDeleteTargetsResult(result: NativeDeleteTargetsResult): DeleteTargetsResult { - return { - deletedFiles: result.deletedFiles ?? result.deleted_files ?? [], - deletedDirs: result.deletedDirs ?? result.deleted_dirs ?? [], - fileErrors: result.fileErrors ?? result.file_errors ?? [], - dirErrors: result.dirErrors ?? result.dir_errors ?? [] - } -} - -export function getPlatformFixedDir(): string { - return requireDeskPathsMethod('getPlatformFixedDir')() -} - -export function ensureDir(dir: string): void { - requireDeskPathsMethod('ensureDir')(dir) -} - -export function existsSync(targetPath: string): boolean { - return requireDeskPathsMethod('existsSync')(targetPath) -} - -export function deletePathSync(targetPath: string): void { - requireDeskPathsMethod('deletePathSync')(targetPath) -} - -export function writeFileSync(filePath: string, data: string | Buffer, encoding: BufferEncoding = 'utf8'): void { - requireDeskPathsMethod('writeFileSync')(filePath, data, encoding) -} - -export function readFileSync(filePath: string, encoding: BufferEncoding = 'utf8'): string { - return requireDeskPathsMethod('readFileSync')(filePath, encoding) -} - -export async function deleteFiles(files: readonly string[]): Promise { - return normalizeDeletionResult(await Promise.resolve(requireDeskPathsMethod('deleteFiles')(files) as NativeDeletionResult)) -} - -export async function deleteDirectories(dirs: readonly string[]): Promise { - return normalizeDeletionResult(await Promise.resolve(requireDeskPathsMethod('deleteDirectories')(dirs) as NativeDeletionResult)) -} - -export async function deleteEmptyDirectories(dirs: readonly string[]): Promise { - return normalizeDeletionResult(await Promise.resolve(requireDeskPathsMethod('deleteEmptyDirectories')(dirs) as NativeDeletionResult)) -} - -export async function deleteTargets(targets: { - readonly files?: readonly string[] - readonly dirs?: readonly string[] -}): Promise { - return normalizeDeleteTargetsResult(await Promise.resolve(requireDeskPathsMethod('deleteTargets')({ - files: targets.files ?? [], - dirs: targets.dirs ?? [] - }) as NativeDeleteTargetsResult)) -} - -export function writeFileSafe(options: SafeWriteOptions): SafeWriteResult { - const {fullPath, content, type, relativePath, dryRun, logger} = options - - if (dryRun) { - logger.trace({action: 'dryRun', type, path: fullPath}) - return {path: relativePath, success: true, skipped: false} - } - - try { - writeFileSync(fullPath, content) - logger.trace({action: 'write', type, path: fullPath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - logger.error(buildFileOperationDiagnostic({ - code: 'OUTPUT_FILE_WRITE_FAILED', - title: `Failed to write ${type} output`, - operation: 'write', - targetKind: `${type} output file`, - path: fullPath, - error: errMsg, - details: { - relativePath, - type - } - })) - return {path: relativePath, success: false, error: error as Error} - } -} diff --git a/cli/src/core/desk_paths.rs b/cli/src/core/desk_paths.rs deleted file mode 100644 index c308fc6e..00000000 --- a/cli/src/core/desk_paths.rs +++ /dev/null @@ -1,623 +0,0 @@ -use std::env; -use std::fs; -use std::io; -use std::path::{Path, PathBuf}; - -use thiserror::Error; - -use crate::core::config; - -const WINDOWS_DRIVE_PREFIX_LEN: usize = 2; - -/// Errors emitted by the desk-paths helpers. -#[derive(Debug, Error)] -pub enum DeskPathsError { - #[error("{0}")] - Io(#[from] io::Error), - #[error("unsupported platform: {0}")] - UnsupportedPlatform(String), -} - -pub type DeskPathsResult = Result; - -/// Platform shim that mirrors the values used by the legacy TS module. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum Platform { - Win32, - Linux, - Darwin, -} - -impl Platform { - fn from_runtime(ctx: &config::RuntimeEnvironmentContext) -> Self { - if ctx.is_wsl { - return Platform::Win32; - } - match env::consts::OS { - "macos" => Platform::Darwin, - "windows" => Platform::Win32, - _ => Platform::Linux, - } - } - - fn is_windows(self) -> bool { - matches!(self, Platform::Win32) - } -} - -pub fn get_platform_fixed_dir() -> DeskPathsResult { - let ctx = config::resolve_runtime_environment(); - let platform = Platform::from_runtime(&ctx); - let target = match platform { - Platform::Win32 => get_windows_fixed_dir(&ctx), - Platform::Darwin => get_home_dir(&ctx) - .join("Library") - .join("Application Support"), - Platform::Linux => get_linux_data_dir(&ctx), - }; - Ok(target.to_string_lossy().into_owned()) -} - -fn get_windows_fixed_dir(ctx: &config::RuntimeEnvironmentContext) -> PathBuf { - let default = get_home_dir(ctx).join("AppData").join("Local"); - let candidate = - env::var("LOCALAPPDATA").unwrap_or_else(|_| default.to_string_lossy().into_owned()); - PathBuf::from(resolve_user_path(&candidate, ctx)) -} - -fn get_linux_data_dir(ctx: &config::RuntimeEnvironmentContext) -> PathBuf { - if let Ok(xdg_data_home) = env::var("XDG_DATA_HOME") - && !xdg_data_home.trim().is_empty() - { - return PathBuf::from(resolve_user_path(&xdg_data_home, ctx)); - } - get_home_dir(ctx).join(".local").join("share") -} - -fn get_home_dir(ctx: &config::RuntimeEnvironmentContext) -> PathBuf { - ctx.effective_home_dir - .as_ref() - .cloned() - .or_else(|| ctx.native_home_dir.clone()) - .unwrap_or_else(|| dirs::home_dir().unwrap_or_else(|| PathBuf::from("/"))) -} - -fn resolve_user_path(raw_path: &str, ctx: &config::RuntimeEnvironmentContext) -> String { - let platform = Platform::from_runtime(ctx); - let home_dir = get_home_dir(ctx); - let expanded = expand_home_directory(raw_path, &home_dir); - if ctx.is_wsl { - if let Some(converted) = convert_windows_path_to_wsl(&expanded) { - return normalize_posix_like_path(&converted, true); - } - return normalize_posix_like_path(&expanded, true); - } - if platform.is_windows() { - normalize_windows_path(&expanded) - } else { - normalize_posix_like_path(&expanded, false) - } -} - -fn expand_home_directory(raw_path: &str, home_dir: &Path) -> String { - if raw_path == "~" { - return normalize_posix_like_path(&home_dir.to_string_lossy(), false); - } - if raw_path.starts_with("~/") || raw_path.starts_with("~\\") { - let suffix = &raw_path[2..]; - let normalized = suffix.replace('\\', "/"); - let mut joined = PathBuf::from(home_dir); - for component in normalized.split('/') { - if component.is_empty() || component == "." { - continue; - } - if component == ".." { - joined.pop(); - } else { - joined.push(component); - } - } - return normalize_posix_like_path(&joined.to_string_lossy(), false); - } - raw_path.to_string() -} - -fn normalize_posix_like_path(raw_path: &str, preserve_slashes: bool) -> String { - let replaced = raw_path.replace('\\', "/"); - let is_absolute = replaced.starts_with('/'); - let mut components = Vec::new(); - for segment in replaced.split('/') { - if segment.is_empty() || segment == "." { - continue; - } - if segment == ".." { - components.pop(); - continue; - } - components.push(segment); - } - let mut normalized = String::new(); - if is_absolute { - normalized.push('/'); - } - normalized.push_str(&components.join("/")); - if normalized.is_empty() { - if is_absolute { - normalized.push('/'); - } else if preserve_slashes { - normalized.push('.'); - } - } - normalized -} - -fn normalize_windows_path(raw_path: &str) -> String { - let replaced = raw_path.replace('/', "\\"); - let mut components = Vec::new(); - let mut rest = replaced.as_str(); - let mut prefix = String::new(); - if rest.len() >= WINDOWS_DRIVE_PREFIX_LEN && rest.as_bytes()[1] == b':' { - prefix = rest[..WINDOWS_DRIVE_PREFIX_LEN].to_ascii_uppercase(); - rest = &rest[WINDOWS_DRIVE_PREFIX_LEN..]; - } - for segment in rest.split('\\') { - if segment.is_empty() || segment == "." { - continue; - } - if segment == ".." { - components.pop(); - continue; - } - components.push(segment); - } - let mut normalized = prefix.clone(); - if !normalized.is_empty() && !components.is_empty() { - normalized.push('\\'); - } - normalized.push_str(&components.join("\\")); - if normalized.is_empty() { - normalized.push('.'); - } - normalized -} - -fn convert_windows_path_to_wsl(raw_path: &str) -> Option { - let bytes = raw_path.as_bytes(); - if bytes.len() < WINDOWS_DRIVE_PREFIX_LEN + 1 || bytes[1] != b':' { - return None; - } - let drive_letter = (bytes[0] as char).to_ascii_lowercase(); - if !drive_letter.is_ascii_alphabetic() { - return None; - } - let mut rest = &raw_path[WINDOWS_DRIVE_PREFIX_LEN..]; - if rest.starts_with('\\') || rest.starts_with('/') { - rest = &rest[1..]; - } - let normalized = rest.replace('\\', "/"); - let prefix = format!("/mnt/{}", drive_letter); - if normalized.is_empty() { - return Some(prefix); - } - Some(format!("{}/{}", prefix, normalized)) -} - -pub fn ensure_dir>(dir: P) -> io::Result<()> { - fs::create_dir_all(dir) -} - -pub fn exists_sync>(path: P) -> bool { - path.as_ref().exists() -} - -pub fn delete_path_sync>(path: P) -> io::Result<()> { - delete_path(path).map(|_| ()) -} - -fn delete_path(path: impl AsRef) -> io::Result { - let path = path.as_ref(); - let metadata = match fs::symlink_metadata(path) { - Ok(metadata) => metadata, - Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(false), - Err(err) => return Err(err), - }; - - if metadata.file_type().is_symlink() { - #[cfg(windows)] - { - return fs::metadata(path) - .map(|resolved| resolved.is_dir()) - .unwrap_or(false) - .then(|| fs::remove_dir(path).or_else(|_| fs::remove_file(path))) - .unwrap_or_else(|| fs::remove_file(path).or_else(|_| fs::remove_dir(path))) - .map(|_| true); - } - #[cfg(not(windows))] - { - return fs::remove_file(path).map(|_| true); - } - } - - if metadata.is_dir() { - fs::remove_dir_all(path).map(|_| true) - } else { - fs::remove_file(path).map(|_| true) - } -} - -pub fn write_file_sync>(path: P, content: &[u8]) -> io::Result<()> { - if let Some(parent) = path.as_ref().parent() { - fs::create_dir_all(parent)?; - } - fs::write(path, content) -} - -pub fn read_file_sync>(path: P) -> io::Result { - fs::read_to_string(&path).map_err(|err| { - io::Error::new( - err.kind(), - format!( - "Failed to read file \"{}\": {}", - path.as_ref().display(), - err - ), - ) - }) -} - -pub struct DeletionError { - pub path: String, - pub error: String, -} - -pub struct DeletionResult { - pub deleted: usize, - pub deleted_paths: Vec, - pub errors: Vec, -} - -pub struct DeleteTargetsResult { - pub deleted_files: Vec, - pub deleted_dirs: Vec, - pub file_errors: Vec, - pub dir_errors: Vec, -} - -fn delete_empty_directory(path: impl AsRef) -> io::Result { - let path = path.as_ref(); - let metadata = match fs::symlink_metadata(path) { - Ok(metadata) => metadata, - Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(false), - Err(err) => return Err(err), - }; - - if metadata.file_type().is_symlink() || !metadata.is_dir() { - return Ok(false); - } - - match fs::remove_dir(path) { - Ok(()) => Ok(true), - Err(err) - if err.kind() == io::ErrorKind::NotFound - || err.kind() == io::ErrorKind::DirectoryNotEmpty => - { - Ok(false) - } - Err(err) => Err(err), - } -} - -pub fn delete_files(paths: &[String]) -> DeletionResult { - let mut result = DeletionResult { - deleted: 0, - deleted_paths: Vec::new(), - errors: Vec::new(), - }; - for path in paths { - match delete_path(Path::new(path)) { - Ok(true) => { - result.deleted += 1; - result.deleted_paths.push(path.clone()); - } - Ok(false) => {} - Err(err) => result.errors.push(DeletionError { - path: path.clone(), - error: err.to_string(), - }), - } - } - result -} - -pub fn delete_directories(paths: &[String]) -> DeletionResult { - let mut sorted_paths = paths.to_vec(); - sorted_paths.sort_by(|a, b| b.len().cmp(&a.len()).then_with(|| b.cmp(a))); - - let mut result = DeletionResult { - deleted: 0, - deleted_paths: Vec::new(), - errors: Vec::new(), - }; - for path in &sorted_paths { - match delete_path(Path::new(path)) { - Ok(true) => { - result.deleted += 1; - result.deleted_paths.push(path.clone()); - } - Ok(false) => {} - Err(err) => result.errors.push(DeletionError { - path: path.clone(), - error: err.to_string(), - }), - } - } - result -} - -pub fn delete_empty_directories(paths: &[String]) -> DeletionResult { - let mut sorted_paths = paths.to_vec(); - sorted_paths.sort_by(|a, b| b.len().cmp(&a.len()).then_with(|| b.cmp(a))); - - let mut result = DeletionResult { - deleted: 0, - deleted_paths: Vec::new(), - errors: Vec::new(), - }; - for path in &sorted_paths { - match delete_empty_directory(Path::new(path)) { - Ok(true) => { - result.deleted += 1; - result.deleted_paths.push(path.clone()); - } - Ok(false) => {} - Err(err) => result.errors.push(DeletionError { - path: path.clone(), - error: err.to_string(), - }), - } - } - result -} - -pub fn delete_targets(files: &[String], dirs: &[String]) -> DeleteTargetsResult { - let file_result = delete_files(files); - let dir_result = delete_directories(dirs); - DeleteTargetsResult { - deleted_files: file_result.deleted_paths, - deleted_dirs: dir_result.deleted_paths, - file_errors: file_result.errors, - dir_errors: dir_result.errors, - } -} - -#[cfg(feature = "napi")] -mod napi_binding { - use napi::bindgen_prelude::*; - use napi_derive::napi; - - use super::DeletionError; - - #[napi] - pub fn get_platform_fixed_dir() -> napi::Result { - super::get_platform_fixed_dir().map_err(|err| napi::Error::from_reason(err.to_string())) - } - - #[napi] - pub fn ensure_dir(path: String) -> napi::Result<()> { - super::ensure_dir(path).map_err(|err| napi::Error::from_reason(err.to_string())) - } - - #[napi] - pub fn exists_sync(path: String) -> bool { - super::exists_sync(path) - } - - #[napi] - pub fn delete_path_sync(path: String) -> napi::Result<()> { - super::delete_path_sync(path).map_err(|err| napi::Error::from_reason(err.to_string())) - } - - #[napi] - pub fn write_file_sync( - path: String, - data: Either, - encoding: Option, - ) -> napi::Result<()> { - if let Some(value) = encoding.as_deref() { - let normalized = value.to_ascii_lowercase(); - if normalized != "utf8" && normalized != "utf-8" { - return Err(napi::Error::from_reason(format!( - "unsupported encoding: {}", - value - ))); - } - } - - let bytes = match data { - Either::A(text) => text.into_bytes(), - Either::B(buffer) => buffer.to_vec(), - }; - super::write_file_sync(path, &bytes) - .map_err(|err| napi::Error::from_reason(err.to_string())) - } - - #[napi] - pub fn read_file_sync(path: String, encoding: Option) -> napi::Result { - if let Some(value) = encoding.as_deref() { - let normalized = value.to_ascii_lowercase(); - if normalized != "utf8" && normalized != "utf-8" { - return Err(napi::Error::from_reason(format!( - "unsupported encoding: {}", - value - ))); - } - } - super::read_file_sync(path).map_err(|err| napi::Error::from_reason(err.to_string())) - } - - #[napi(object)] - pub struct NapiDeletionError { - pub path: String, - pub error: String, - } - - #[napi(object)] - pub struct NapiDeletionResult { - pub deleted: u32, - #[napi(js_name = "deletedPaths")] - pub deleted_paths: Vec, - pub errors: Vec, - } - - #[napi(object)] - pub struct NapiDeleteTargetsResult { - #[napi(js_name = "deletedFiles")] - pub deleted_files: Vec, - #[napi(js_name = "deletedDirs")] - pub deleted_dirs: Vec, - #[napi(js_name = "fileErrors")] - pub file_errors: Vec, - #[napi(js_name = "dirErrors")] - pub dir_errors: Vec, - } - - fn to_napi_error(err: DeletionError) -> NapiDeletionError { - NapiDeletionError { - path: err.path, - error: err.error, - } - } - - #[napi] - pub fn delete_files(paths: Vec) -> NapiDeletionResult { - let result = super::delete_files(&paths); - NapiDeletionResult { - deleted: result.deleted as u32, - deleted_paths: result.deleted_paths, - errors: result.errors.into_iter().map(to_napi_error).collect(), - } - } - - #[napi] - pub fn delete_directories(paths: Vec) -> NapiDeletionResult { - let result = super::delete_directories(&paths); - NapiDeletionResult { - deleted: result.deleted as u32, - deleted_paths: result.deleted_paths, - errors: result.errors.into_iter().map(to_napi_error).collect(), - } - } - - #[napi] - pub fn delete_empty_directories(paths: Vec) -> NapiDeletionResult { - let result = super::delete_empty_directories(&paths); - NapiDeletionResult { - deleted: result.deleted as u32, - deleted_paths: result.deleted_paths, - errors: result.errors.into_iter().map(to_napi_error).collect(), - } - } - - #[napi(object)] - pub struct DeleteTargetsInput { - pub files: Option>, - pub dirs: Option>, - } - - #[napi] - pub fn delete_targets(paths: DeleteTargetsInput) -> NapiDeleteTargetsResult { - let files = paths.files.unwrap_or_default(); - let dirs = paths.dirs.unwrap_or_default(); - let result = super::delete_targets(&files, &dirs); - NapiDeleteTargetsResult { - deleted_files: result.deleted_files, - deleted_dirs: result.deleted_dirs, - file_errors: result.file_errors.into_iter().map(to_napi_error).collect(), - dir_errors: result.dir_errors.into_iter().map(to_napi_error).collect(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::fs; - use tempfile::tempdir; - - #[test] - fn delete_targets_batch() { - let dir = tempdir().unwrap(); - let files_dir = dir.path().join("files"); - let dirs_dir = dir.path().join("dirs"); - fs::create_dir_all(&files_dir).unwrap(); - fs::create_dir_all(dirs_dir.join("nested")).unwrap(); - let file = files_dir.join("artifact.txt"); - fs::write(&file, b"data").unwrap(); - let leaf = dirs_dir.join("nested").join("inner.txt"); - fs::write(&leaf, b"payload").unwrap(); - - let result = delete_targets( - &[file.to_string_lossy().into_owned()], - &[dirs_dir.to_string_lossy().into_owned()], - ); - - assert_eq!( - result.deleted_files, - vec![file.to_string_lossy().into_owned()] - ); - assert!( - result - .deleted_dirs - .contains(&dirs_dir.to_string_lossy().into_owned()) - ); - assert!(result.file_errors.is_empty()); - assert!(result.dir_errors.is_empty()); - } - - #[test] - fn delete_empty_directories_only_removes_empty_paths() { - let dir = tempdir().unwrap(); - let parent_dir = dir.path().join("empty-parent"); - let child_dir = parent_dir.join("leaf"); - let non_empty_dir = dir.path().join("non-empty"); - fs::create_dir_all(&child_dir).unwrap(); - fs::create_dir_all(&non_empty_dir).unwrap(); - fs::write(non_empty_dir.join("keep.txt"), b"keep").unwrap(); - - let result = delete_empty_directories(&[ - parent_dir.to_string_lossy().into_owned(), - child_dir.to_string_lossy().into_owned(), - non_empty_dir.to_string_lossy().into_owned(), - ]); - - assert_eq!(result.deleted, 2); - assert_eq!( - result.deleted_paths, - vec![ - child_dir.to_string_lossy().into_owned(), - parent_dir.to_string_lossy().into_owned(), - ] - ); - assert!(result.errors.is_empty()); - assert!(!parent_dir.exists()); - assert!(non_empty_dir.exists()); - } - - #[test] - fn delete_empty_directories_skips_non_empty_and_missing_paths() { - let dir = tempdir().unwrap(); - let target_dir = dir.path().join("maybe-empty"); - fs::create_dir_all(&target_dir).unwrap(); - fs::write(target_dir.join("new-file.txt"), b"late write").unwrap(); - - let result = delete_empty_directories(&[ - target_dir.to_string_lossy().into_owned(), - dir.path().join("missing").to_string_lossy().into_owned(), - ]); - - assert_eq!(result.deleted, 0); - assert!(result.deleted_paths.is_empty()); - assert!(result.errors.is_empty()); - assert!(target_dir.exists()); - } -} diff --git a/cli/src/core/input_plugins.rs b/cli/src/core/input_plugins.rs deleted file mode 100644 index 6ceb47ea..00000000 --- a/cli/src/core/input_plugins.rs +++ /dev/null @@ -1,9 +0,0 @@ -#![deny(clippy::all)] - -//! All 17 input plugins for the tnmsc pipeline. -//! -//! Plugins are grouped by type: -//! - File readers (workspace, gitignore, editorconfig, vscode, jetbrains) -//! - MDX directory scanners (fast-command, sub-agent, rule, global-memory) -//! - Complex plugins (shadow-project, skill, project-prompt, readme) -//! - Effect plugins (md-cleanup, orphan-cleanup, skill-dist-cleanup) diff --git a/cli/src/core/mod.rs b/cli/src/core/mod.rs deleted file mode 100644 index 5881df2c..00000000 --- a/cli/src/core/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub mod cleanup; -pub mod config; -pub mod desk_paths; -pub mod input_plugins; -pub mod plugin_shared; diff --git a/cli/src/core/native-binding.ts b/cli/src/core/native-binding.ts deleted file mode 100644 index d761cef8..00000000 --- a/cli/src/core/native-binding.ts +++ /dev/null @@ -1,63 +0,0 @@ -import {createRequire} from 'node:module' -import process from 'node:process' - -function shouldSkipNativeBinding(): boolean { - if (process.env['TNMSC_FORCE_NATIVE_BINDING'] === '1') return false - if (process.env['TNMSC_DISABLE_NATIVE_BINDING'] === '1') return true - - return process.env['NODE_ENV'] === 'test' || process.env['VITEST'] != null || process.env['VITEST_WORKER_ID'] != null -} - -export function tryLoadNativeBinding(): T | undefined { - const testGlobals = globalThis as typeof globalThis & {__TNMSC_TEST_NATIVE_BINDING__?: object} - const testBinding: unknown = testGlobals.__TNMSC_TEST_NATIVE_BINDING__ - if (testBinding != null && typeof testBinding === 'object') return testBinding as T - if (shouldSkipNativeBinding()) return void 0 - - const suffixMap: Readonly> = { - 'win32-x64': 'win32-x64-msvc', - 'linux-x64': 'linux-x64-gnu', - 'linux-arm64': 'linux-arm64-gnu', - 'darwin-arm64': 'darwin-arm64', - 'darwin-x64': 'darwin-x64' - } - const suffix = suffixMap[`${process.platform}-${process.arch}`] - if (suffix == null) return void 0 - - try { - const _require = createRequire(import.meta.url) - const packageName = `@truenine/memory-sync-cli-${suffix}` - const binaryFile = `napi-memory-sync-cli.${suffix}.node` - const candidates = [ - packageName, - `${packageName}/${binaryFile}`, - `./${binaryFile}`, - `../npm/${suffix}`, - `../npm/${suffix}/${binaryFile}`, - `../../npm/${suffix}`, - `../../npm/${suffix}/${binaryFile}` - ] - - for (const specifier of candidates) { - try { - const loaded = _require(specifier) as unknown - const possibleBindings = [ - (loaded as {config?: unknown})?.config, - (loaded as {default?: {config?: unknown}})?.default?.config, - (loaded as {default?: unknown})?.default, - loaded - ] - - for (const candidate of possibleBindings) { - if (candidate != null && typeof candidate === 'object') return candidate as T - } - } catch {} - } - } catch {} - - return void 0 -} - -export function getNativeBinding(): T | undefined { - return tryLoadNativeBinding() -} diff --git a/cli/src/core/plugin_shared.rs b/cli/src/core/plugin_shared.rs deleted file mode 100644 index ead00550..00000000 --- a/cli/src/core/plugin_shared.rs +++ /dev/null @@ -1,623 +0,0 @@ -//! Shared types and data structures for tnmsc plugins. -//! -//! Defines `CollectedInputContext`, `RelativePath`, plugin traits, -//! and other types shared between input plugins, CLI, and output runtime. - -use serde::{Deserialize, Serialize}; -use serde_json::Value; -use std::collections::HashMap; -use std::path::PathBuf; - -// --------------------------------------------------------------------------- -// Enums -// --------------------------------------------------------------------------- - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub enum PluginKind { - Input, - Output, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub enum PromptKind { - GlobalMemory, - ProjectRootMemory, - ProjectChildrenMemory, - FastCommand, - SubAgent, - Skill, - SkillChildDoc, - SkillResource, - SkillMcpConfig, - Readme, - Rule, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum RuleScope { - Project, - Global, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub enum FilePathKind { - Relative, - Absolute, - Root, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub enum IDEKind { - VSCode, - IntellijIDEA, - Git, - EditorConfig, - Original, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub enum NamingCaseKind { - CamelCase, - PascalCase, - SnakeCase, - KebabCase, - UpperCase, - LowerCase, - Original, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum SkillResourceEncoding { - Text, - Base64, -} - -// --------------------------------------------------------------------------- -// Path types -// --------------------------------------------------------------------------- - -/// Relative path with base path for computing absolute paths. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct RelativePath { - pub path_kind: FilePathKind, - pub path: String, - pub base_path: String, - /// Pre-computed absolute path for serialization to Node.js - #[serde(skip_serializing_if = "Option::is_none")] - pub absolute_path: Option, - /// Pre-computed directory name for serialization to Node.js - #[serde(skip_serializing_if = "Option::is_none")] - pub directory_name: Option, -} - -impl RelativePath { - pub fn new(path: &str, base_path: &str) -> Self { - let abs = PathBuf::from(base_path).join(path); - let dir_name = PathBuf::from(path) - .parent() - .map(|p| p.to_string_lossy().into_owned()) - .unwrap_or_default(); - Self { - path_kind: FilePathKind::Relative, - path: path.to_string(), - base_path: base_path.to_string(), - absolute_path: Some(abs.to_string_lossy().into_owned()), - directory_name: Some(dir_name), - } - } - - pub fn get_absolute_path(&self) -> String { - self.absolute_path.clone().unwrap_or_else(|| { - PathBuf::from(&self.base_path) - .join(&self.path) - .to_string_lossy() - .into_owned() - }) - } - - pub fn get_directory_name(&self) -> String { - self.directory_name.clone().unwrap_or_else(|| { - PathBuf::from(&self.path) - .parent() - .map(|p| p.to_string_lossy().into_owned()) - .unwrap_or_default() - }) - } -} - -/// Root path (workspace root). -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct RootPath { - pub path_kind: FilePathKind, - pub path: String, -} - -impl RootPath { - pub fn new(path: &str) -> Self { - Self { - path_kind: FilePathKind::Root, - path: path.to_string(), - } - } -} - -// --------------------------------------------------------------------------- -// YAML front matter types -// --------------------------------------------------------------------------- - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct YAMLFrontMatter { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub naming_case: Option, - #[serde(flatten)] - pub extra: HashMap, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CommonYAMLFrontMatter { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub description: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub naming_case: Option, - #[serde(flatten)] - pub extra: HashMap, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct RuleYAMLFrontMatter { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub description: Option, - #[serde(default)] - pub globs: Vec, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub scope: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub seri_name: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub naming_case: Option, - #[serde(flatten)] - pub extra: HashMap, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct FastCommandYAMLFrontMatter { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub description: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub argument_hint: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub allow_tools: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub naming_case: Option, - #[serde(flatten)] - pub extra: HashMap, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SubAgentYAMLFrontMatter { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub name: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub description: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub model: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub color: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub argument_hint: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub allow_tools: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub naming_case: Option, - #[serde(flatten)] - pub extra: HashMap, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SkillYAMLFrontMatter { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub name: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub description: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub display_name: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub author: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub version: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub keywords: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub allow_tools: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub naming_case: Option, - #[serde(flatten)] - pub extra: HashMap, -} - -// --------------------------------------------------------------------------- -// Prompt types -// --------------------------------------------------------------------------- - -/// Rule prompt with glob patterns. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct RulePrompt { - #[serde(rename = "type")] - pub prompt_type: PromptKind, - pub content: String, - pub length: usize, - pub dir: RelativePath, - pub series: String, - pub rule_name: String, - pub globs: Vec, - pub scope: RuleScope, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub seri_name: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub yaml_front_matter: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub raw_mdx_content: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub markdown_contents: Option>, -} - -/// Fast command prompt. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct FastCommandPrompt { - #[serde(rename = "type")] - pub prompt_type: PromptKind, - pub content: String, - pub length: usize, - pub dir: RelativePath, - pub command_name: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub series: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub global_only: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub yaml_front_matter: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub raw_mdx_content: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub markdown_contents: Option>, -} - -/// Sub-agent prompt. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SubAgentPrompt { - #[serde(rename = "type")] - pub prompt_type: PromptKind, - pub content: String, - pub length: usize, - pub dir: RelativePath, - pub agent_name: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub series: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub yaml_front_matter: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub raw_mdx_content: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub markdown_contents: Option>, -} - -/// Skill child document. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SkillChildDoc { - #[serde(rename = "type")] - pub prompt_type: PromptKind, - pub content: String, - pub length: usize, - pub relative_path: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub markdown_contents: Option>, -} - -/// Skill resource file. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SkillResource { - #[serde(rename = "type")] - pub prompt_type: PromptKind, - pub extension: String, - pub file_name: String, - pub relative_path: String, - pub content: String, - pub encoding: SkillResourceEncoding, - pub length: usize, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub mime_type: Option, -} - -/// MCP server configuration entry. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct McpServerConfig { - pub command: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub args: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub env: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub disabled: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub auto_approve: Option>, -} - -/// Skill MCP configuration (mcp.json). -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SkillMcpConfig { - #[serde(rename = "type")] - pub prompt_type: PromptKind, - pub mcp_servers: HashMap, - pub raw_content: String, -} - -/// Skill prompt. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SkillPrompt { - #[serde(rename = "type")] - pub prompt_type: PromptKind, - pub content: String, - pub length: usize, - pub dir: RelativePath, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub yaml_front_matter: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub mcp_config: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub child_docs: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub resources: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub markdown_contents: Option>, -} - -/// Global memory prompt. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct GlobalMemoryPrompt { - #[serde(rename = "type")] - pub prompt_type: PromptKind, - pub content: String, - pub length: usize, - pub dir: RelativePath, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub markdown_contents: Option>, -} - -/// Readme prompt. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ReadmePrompt { - #[serde(rename = "type")] - pub prompt_type: PromptKind, - pub content: String, - pub length: usize, - pub dir: RelativePath, - pub project_name: String, - pub target_dir: RelativePath, - pub is_root: bool, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub markdown_contents: Option>, -} - -// --------------------------------------------------------------------------- -// IDE config types -// --------------------------------------------------------------------------- - -/// IDE configuration file. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ProjectIDEConfigFile { - #[serde(rename = "type")] - pub ide_type: IDEKind, - pub content: String, - pub length: usize, - pub dir: RelativePath, - pub file_path_kind: FilePathKind, -} - -// --------------------------------------------------------------------------- -// Project & Workspace -// --------------------------------------------------------------------------- - -/// Project within a workspace. -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct Project { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub name: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub dir_from_workspace_path: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub root_memory_prompt: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub child_memory_prompts: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub is_prompt_source_project: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub project_config: Option, -} - -/// Workspace containing projects. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct Workspace { - pub directory: RootPath, - #[serde(default)] - pub projects: Vec, -} - -// --------------------------------------------------------------------------- -// CollectedInputContext — the main bridge type -// --------------------------------------------------------------------------- - -/// All collected input information, serialized from Rust to Node.js output runtime. -#[derive(Debug, Clone, Default, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CollectedInputContext { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub workspace: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub vscode_config_files: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub jetbrains_config_files: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub editor_config_files: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub fast_commands: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub sub_agents: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub skills: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub rules: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub global_memory: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub global_git_ignore: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub shadow_git_exclude: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub shadow_source_project_dir: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub readme_prompts: Option>, -} - -// --------------------------------------------------------------------------- -// Tests -// --------------------------------------------------------------------------- - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_relative_path() { - let rp = RelativePath::new("src/skills/test.mdx", "/home/user/workspace/aindex"); - assert_eq!(rp.path, "src/skills/test.mdx"); - assert_eq!(rp.base_path, "/home/user/workspace/aindex"); - assert!(rp.get_absolute_path().contains("src/skills/test.mdx")); - assert_eq!(rp.get_directory_name(), "src/skills"); - } - - #[test] - fn test_collected_input_context_default() { - let ctx = CollectedInputContext::default(); - assert!(ctx.workspace.is_none()); - assert!(ctx.fast_commands.is_none()); - } - - #[test] - fn test_collected_input_context_serialize() { - let ctx = CollectedInputContext { - workspace: Some(Workspace { - directory: RootPath::new("/workspace"), - projects: vec![], - }), - global_git_ignore: Some("node_modules/\n".to_string()), - ..Default::default() - }; - let json = serde_json::to_string(&ctx).unwrap(); - assert!(json.contains("workspace")); - assert!(json.contains("globalGitIgnore")); - // Fields that are None should not appear - assert!(!json.contains("fastCommands")); - } - - #[test] - fn test_collected_input_context_roundtrip() { - let ctx = CollectedInputContext { - workspace: Some(Workspace { - directory: RootPath::new("/workspace"), - projects: vec![Project { - name: Some("test-project".into()), - ..Default::default() - }], - }), - fast_commands: Some(vec![FastCommandPrompt { - prompt_type: PromptKind::FastCommand, - content: "# Test Command\n\nDo something.".into(), - length: 30, - dir: RelativePath::new("commands/test.mdx", "/workspace/aindex/dist"), - command_name: "test".into(), - series: Some("default".into()), - global_only: None, - yaml_front_matter: Some(FastCommandYAMLFrontMatter { - description: Some("A test command".into()), - ..Default::default() - }), - raw_mdx_content: None, - markdown_contents: None, - }]), - ..Default::default() - }; - - let json = serde_json::to_string_pretty(&ctx).unwrap(); - let parsed: CollectedInputContext = serde_json::from_str(&json).unwrap(); - assert_eq!(parsed.workspace.as_ref().unwrap().projects.len(), 1); - assert_eq!(parsed.fast_commands.as_ref().unwrap().len(), 1); - assert_eq!( - parsed.fast_commands.as_ref().unwrap()[0].command_name, - "test" - ); - } - - #[test] - fn test_rule_prompt_serialize() { - let rule = RulePrompt { - prompt_type: PromptKind::Rule, - content: "# Rule\n\nDo this.".into(), - length: 17, - dir: RelativePath::new("rules/default/test.mdx", "/workspace/aindex/dist"), - series: "default".into(), - rule_name: "test".into(), - globs: vec!["**/*.ts".into(), "**/*.tsx".into()], - scope: RuleScope::Project, - seri_name: None, - yaml_front_matter: None, - raw_mdx_content: None, - markdown_contents: None, - }; - let json = serde_json::to_string(&rule).unwrap(); - assert!(json.contains("\"type\":\"Rule\"")); - assert!(json.contains("\"globs\"")); - } - - #[test] - fn test_enums_serialize() { - assert_eq!( - serde_json::to_string(&PromptKind::FastCommand).unwrap(), - "\"FastCommand\"" - ); - assert_eq!( - serde_json::to_string(&RuleScope::Global).unwrap(), - "\"global\"" - ); - assert_eq!( - serde_json::to_string(&IDEKind::VSCode).unwrap(), - "\"VSCode\"" - ); - assert_eq!( - serde_json::to_string(&SkillResourceEncoding::Base64).unwrap(), - "\"base64\"" - ); - } -} diff --git a/cli/src/diagnostic_helpers.rs b/cli/src/diagnostic_helpers.rs deleted file mode 100644 index c01bc7f9..00000000 --- a/cli/src/diagnostic_helpers.rs +++ /dev/null @@ -1,32 +0,0 @@ -use serde_json::{Map, Value}; -use tnmsc_logger::LoggerDiagnosticInput; - -pub(crate) fn line(value: impl Into) -> Vec { - vec![value.into()] -} - -pub(crate) fn diagnostic( - code: impl Into, - title: impl Into, - root_cause: Vec, - exact_fix: Option>, - possible_fixes: Option>>, - details: Option>, -) -> LoggerDiagnosticInput { - LoggerDiagnosticInput { - code: code.into(), - title: title.into(), - root_cause, - exact_fix, - possible_fixes, - details, - } -} - -pub(crate) fn optional_details(value: Value) -> Option> { - match value { - Value::Object(map) if !map.is_empty() => Some(map), - Value::Object(_) => None, - _ => None, - } -} diff --git a/cli/src/diagnostics.test.ts b/cli/src/diagnostics.test.ts deleted file mode 100644 index 053d1ec9..00000000 --- a/cli/src/diagnostics.test.ts +++ /dev/null @@ -1,54 +0,0 @@ -import {describe, expect, it} from 'vitest' -import {buildFileOperationDiagnostic} from './diagnostics' - -describe('buildFileOperationDiagnostic', () => { - it('emits Windows-specific cleanup guidance for EPERM directory deletions', () => { - const diagnostic = buildFileOperationDiagnostic({ - code: 'CLEANUP_DIRECTORY_DELETE_FAILED', - title: 'Cleanup could not delete a directory', - operation: 'delete', - targetKind: 'directory', - path: 'C:\\workspace\\.opencode\\skills', - error: 'EPERM, Permission denied: \\\\?\\C:\\workspace\\.opencode\\skills', - platform: 'win32' - }) - - expect(diagnostic.exactFix).toEqual([ - 'Close any process that is using "C:\\workspace\\.opencode\\skills", delete the stale directory, and rerun tnmsc.', - 'Common lockers on Windows include editors, terminals, antivirus scanners, sync clients, and AI tools watching generated files.' - ]) - expect(diagnostic.possibleFixes).toEqual([ - ['Use Resource Monitor or Process Explorer to find which process holds a handle under "C:\\workspace\\.opencode\\skills".'], - ['Make sure no shell, editor tab, or file watcher is currently opened inside "C:\\workspace\\.opencode\\skills" or one of its children.'], - ['If antivirus or cloud sync is scanning generated outputs, wait for it to release the directory or exclude this output path.'] - ]) - expect(diagnostic.details).toMatchObject({ - platform: 'win32', - errorMessage: 'EPERM, Permission denied: \\\\?\\C:\\workspace\\.opencode\\skills' - }) - }) - - it('keeps generic guidance for non-Windows or non-permission errors', () => { - const diagnostic = buildFileOperationDiagnostic({ - code: 'OUTPUT_FILE_WRITE_FAILED', - title: 'Failed to write output', - operation: 'write', - targetKind: 'file', - path: '/tmp/output.md', - error: 'ENOENT: no such file or directory', - platform: 'linux' - }) - - expect(diagnostic.exactFix).toEqual([ - 'Verify that "/tmp/output.md" exists, has the expected type, and is accessible to tnmsc.' - ]) - expect(diagnostic.possibleFixes).toEqual([ - ['Check file permissions and ownership for the target path.'], - ['Confirm that another process did not delete, move, or lock the target path.'] - ]) - expect(diagnostic.details).toMatchObject({ - platform: 'linux', - errorMessage: 'ENOENT: no such file or directory' - }) - }) -}) diff --git a/cli/src/diagnostics.ts b/cli/src/diagnostics.ts deleted file mode 100644 index 15634a9b..00000000 --- a/cli/src/diagnostics.ts +++ /dev/null @@ -1,415 +0,0 @@ -import type { - DiagnosticLines, - LoggerDiagnosticInput, - LoggerDiagnosticRecord -} from './plugins/plugin-core' -import type {ProtectedPathViolation} from './ProtectedDeletionGuard' -import process from 'node:process' - -export function diagnosticLines(firstLine: string, ...otherLines: string[]): DiagnosticLines { - return [firstLine, ...otherLines] -} - -export function toErrorMessage(error: unknown): string { - return error instanceof Error ? error.message : String(error) -} - -export function splitDiagnosticText(text: string): DiagnosticLines { - const lines = text - .split(/\r?\n/u) - .map(line => line.trimEnd()) - .filter(line => line.length > 0) - - if (lines.length === 0) return diagnosticLines('No diagnostic details were provided.') - const [firstLine, ...otherLines] = lines - if (firstLine == null) return diagnosticLines('No diagnostic details were provided.') - return diagnosticLines(firstLine, ...otherLines) -} - -export function buildDiagnostic(input: LoggerDiagnosticInput): LoggerDiagnosticInput { - return input -} - -interface DiagnosticFailure { - readonly path: string - readonly error: unknown - readonly details?: Record | undefined -} - -interface FileOperationDiagnosticOptions { - readonly code: string - readonly title: string - readonly operation: string - readonly targetKind: string - readonly path: string - readonly error: unknown - readonly platform?: NodeJS.Platform | undefined - readonly exactFix?: DiagnosticLines | undefined - readonly possibleFixes?: readonly DiagnosticLines[] | undefined - readonly details?: Record | undefined -} - -interface FileOperationAdvice { - readonly exactFix: DiagnosticLines - readonly possibleFixes: readonly DiagnosticLines[] -} - -function normalizeErrorMessage(error: unknown): string { - return toErrorMessage(error).toLowerCase() -} - -function isWindowsDirectoryDeletePermissionDenied(options: { - readonly operation: string - readonly targetKind: string - readonly error: unknown - readonly platform: NodeJS.Platform -}): boolean { - if (options.platform !== 'win32') return false - if (options.operation !== 'delete') return false - if (options.targetKind !== 'directory') return false - - const normalizedError = normalizeErrorMessage(options.error) - return normalizedError.includes('eperm') || normalizedError.includes('permission denied') -} - -function buildFileOperationAdvice(options: { - readonly operation: string - readonly targetKind: string - readonly path: string - readonly error: unknown - readonly platform: NodeJS.Platform -}): FileOperationAdvice { - if (isWindowsDirectoryDeletePermissionDenied(options)) { - return { - exactFix: diagnosticLines( - `Close any process that is using "${options.path}", delete the stale directory, and rerun tnmsc.`, - `Common lockers on Windows include editors, terminals, antivirus scanners, sync clients, and AI tools watching generated files.` - ), - possibleFixes: [ - diagnosticLines( - `Use Resource Monitor or Process Explorer to find which process holds a handle under "${options.path}".` - ), - diagnosticLines( - `Make sure no shell, editor tab, or file watcher is currently opened inside "${options.path}" or one of its children.` - ), - diagnosticLines( - `If antivirus or cloud sync is scanning generated outputs, wait for it to release the directory or exclude this output path.` - ) - ] - } - } - - return { - exactFix: diagnosticLines( - `Verify that "${options.path}" exists, has the expected type, and is accessible to tnmsc.` - ), - possibleFixes: [ - diagnosticLines('Check file permissions and ownership for the target path.'), - diagnosticLines('Confirm that another process did not delete, move, or lock the target path.') - ] - } -} - -export function buildFileOperationDiagnostic(options: FileOperationDiagnosticOptions): LoggerDiagnosticInput { - const { - code, - title, - operation, - targetKind, - path, - error, - platform, - exactFix, - possibleFixes, - details - } = options - const errorMessage = toErrorMessage(error) - const advice = buildFileOperationAdvice({ - operation, - targetKind, - path, - error, - platform: platform ?? process.platform - }) - - return buildDiagnostic({ - code, - title, - rootCause: diagnosticLines( - `tnmsc could not ${operation} the ${targetKind} at "${path}".`, - `Underlying error: ${errorMessage}` - ), - exactFix: exactFix ?? advice.exactFix, - possibleFixes: possibleFixes ?? advice.possibleFixes, - details: { - operation, - targetKind, - path, - errorMessage, - platform: platform ?? process.platform, - ...details ?? {} - } - }) -} - -interface BatchFileOperationDiagnosticOptions { - readonly code: string - readonly title: string - readonly operation: string - readonly targetKind: string - readonly failures: readonly DiagnosticFailure[] - readonly exactFix?: DiagnosticLines | undefined - readonly possibleFixes?: readonly DiagnosticLines[] | undefined - readonly details?: Record | undefined -} - -export function buildBatchFileOperationDiagnostic(options: BatchFileOperationDiagnosticOptions): LoggerDiagnosticInput { - const { - code, - title, - operation, - targetKind, - failures, - exactFix, - possibleFixes, - details - } = options - const firstFailure = failures[0] - const firstFailureLine = firstFailure == null - ? 'No failing path details were captured.' - : `First failure: "${firstFailure.path}" -> ${toErrorMessage(firstFailure.error)}` - - return buildDiagnostic({ - code, - title, - rootCause: diagnosticLines( - `tnmsc encountered ${failures.length} failed ${operation} operation(s) while handling ${targetKind}.`, - firstFailureLine - ), - exactFix: exactFix ?? diagnosticLines( - `Inspect the failing ${targetKind} path and correct the underlying ${operation} problem before retrying tnmsc.` - ), - possibleFixes: possibleFixes ?? [ - diagnosticLines('Verify the target path exists, has the expected type, and is accessible to tnmsc.'), - diagnosticLines('Check whether another process deleted, moved, or locked the target path.') - ], - details: { - operation, - targetKind, - failures: failures.map(failure => ({ - path: failure.path, - errorMessage: toErrorMessage(failure.error), - ...failure.details ?? {} - })), - ...details ?? {} - } - }) -} - -interface ConfigDiagnosticOptions { - readonly code: string - readonly title: string - readonly reason: DiagnosticLines - readonly configPath?: string | undefined - readonly exactFix?: DiagnosticLines | undefined - readonly possibleFixes?: readonly DiagnosticLines[] | undefined - readonly details?: Record | undefined -} - -export function buildConfigDiagnostic(options: ConfigDiagnosticOptions): LoggerDiagnosticInput { - const { - code, - title, - reason, - configPath, - exactFix, - possibleFixes, - details - } = options - - return buildDiagnostic({ - code, - title, - rootCause: configPath == null - ? reason - : diagnosticLines(reason[0], ...reason.slice(1), `Config path: ${configPath}`), - exactFix, - possibleFixes, - details: { - ...configPath != null ? {configPath} : {}, - ...details ?? {} - } - }) -} - -interface UsageDiagnosticOptions { - readonly code: string - readonly title: string - readonly rootCause: DiagnosticLines - readonly exactFix?: DiagnosticLines | undefined - readonly possibleFixes?: readonly DiagnosticLines[] | undefined - readonly details?: Record | undefined -} - -export function buildUsageDiagnostic(options: UsageDiagnosticOptions): LoggerDiagnosticInput { - return buildDiagnostic(options) -} - -interface PathStateDiagnosticOptions { - readonly code: string - readonly title: string - readonly path: string - readonly expectedKind: string - readonly actualState: string - readonly exactFix?: DiagnosticLines | undefined - readonly possibleFixes?: readonly DiagnosticLines[] | undefined - readonly details?: Record | undefined -} - -export function buildPathStateDiagnostic(options: PathStateDiagnosticOptions): LoggerDiagnosticInput { - const { - code, - title, - path, - expectedKind, - actualState, - exactFix, - possibleFixes, - details - } = options - - return buildDiagnostic({ - code, - title, - rootCause: diagnosticLines( - `tnmsc expected a ${expectedKind} at "${path}".`, - `Actual state: ${actualState}` - ), - exactFix: exactFix ?? diagnosticLines( - `Create or replace "${path}" with a valid ${expectedKind} before retrying tnmsc.` - ), - possibleFixes: possibleFixes ?? [ - diagnosticLines('Check whether the path was moved, deleted, or replaced with the wrong file type.'), - diagnosticLines('Update your configuration so tnmsc points to the intended source path.') - ], - details: { - path, - expectedKind, - actualState, - ...details ?? {} - } - }) -} - -interface PromptCompilerDiagnosticOptions { - readonly code: string - readonly title: string - readonly diagnosticText: string - readonly exactFix?: DiagnosticLines | undefined - readonly possibleFixes?: readonly DiagnosticLines[] | undefined - readonly details?: Record | undefined -} - -export function buildPromptCompilerDiagnostic(options: PromptCompilerDiagnosticOptions): LoggerDiagnosticInput { - const { - code, - title, - diagnosticText, - exactFix, - possibleFixes, - details - } = options - - const summaryLines = splitDiagnosticText(diagnosticText) - - return buildDiagnostic({ - code, - title, - rootCause: summaryLines, - exactFix: exactFix ?? diagnosticLines( - 'Fix the referenced prompt source or compiled dist file so the compiler diagnostic no longer triggers.' - ), - possibleFixes: possibleFixes ?? [ - diagnosticLines('Open the file referenced in the diagnostic and correct the reported syntax or metadata issue.'), - diagnosticLines('Rebuild the prompt output so the dist file matches the current source tree.') - ], - details: { - diagnosticText, - ...details ?? {} - } - }) -} - -export function buildProtectedDeletionDiagnostic( - operation: string, - violations: readonly ProtectedPathViolation[] -): LoggerDiagnosticInput { - const firstViolation = violations[0] - - return buildDiagnostic({ - code: 'PROTECTED_DELETION_GUARD_TRIGGERED', - title: 'Protected deletion guard blocked a destructive operation', - rootCause: diagnosticLines( - `The "${operation}" operation targeted ${violations.length} protected path(s).`, - firstViolation != null - ? `Example protected path: ${firstViolation.protectedPath}` - : 'No violation details were captured.' - ), - exactFix: diagnosticLines( - 'Remove protected inputs or reserved workspace paths from the delete plan before running tnmsc again.' - ), - possibleFixes: [ - diagnosticLines('Update cleanup declarations so they only target generated output paths.'), - diagnosticLines('Move source inputs outside of the cleanup target set if they are currently overlapping.') - ], - details: { - operation, - count: violations.length, - violations: violations.map(violation => ({ - targetPath: violation.targetPath, - protectedPath: violation.protectedPath, - protectionMode: violation.protectionMode, - source: violation.source, - reason: violation.reason - })) - } - }) -} - -export function buildUnhandledExceptionDiagnostic(context: string, error: unknown): LoggerDiagnosticInput { - const errorMessage = toErrorMessage(error) - - return buildDiagnostic({ - code: 'UNHANDLED_EXCEPTION', - title: `Unhandled exception in ${context}`, - rootCause: diagnosticLines( - `tnmsc terminated because an unhandled exception escaped the ${context} flow.`, - `Underlying error: ${errorMessage}` - ), - exactFix: diagnosticLines( - 'Inspect the error context and add the missing guard, validation, or recovery path before retrying the command.' - ), - possibleFixes: [ - diagnosticLines('Re-run the command with the same inputs after fixing the referenced file or configuration.'), - diagnosticLines('Add a focused test that reproduces this failure so the regression stays covered.') - ], - details: { - context, - errorMessage - } - }) -} - -export function partitionBufferedDiagnostics( - diagnostics: readonly LoggerDiagnosticRecord[] -): {warnings: LoggerDiagnosticRecord[], errors: LoggerDiagnosticRecord[]} { - const warnings: LoggerDiagnosticRecord[] = [] - const errors: LoggerDiagnosticRecord[] = [] - - for (const diagnostic of diagnostics) { - if (diagnostic.level === 'warn') warnings.push(diagnostic) - else errors.push(diagnostic) - } - - return {warnings, errors} -} diff --git a/cli/src/globals.ts b/cli/src/globals.ts index 4622248d..7b218b19 100644 --- a/cli/src/globals.ts +++ b/cli/src/globals.ts @@ -1 +1 @@ -export * from '@truenine/md-compiler/globals' +export * from '@truenine/memory-sync-sdk/globals' diff --git a/cli/src/index.test.ts b/cli/src/index.test.ts index 0727ccea..c14dfa2f 100644 --- a/cli/src/index.test.ts +++ b/cli/src/index.test.ts @@ -1,11 +1,16 @@ -import {describe, expect, it} from 'vitest' +import { + createDefaultPluginConfig, + listPrompts, + runCli +} from '@truenine/memory-sync-sdk' -describe('library entrypoint', () => { - it('can be imported without executing the CLI runtime', async () => { - const mod = await import('./index') +import {describe, expect, it} from 'vitest' +import * as cliShell from './index' - expect(typeof mod.runCli).toBe('function') - expect(typeof mod.createDefaultPluginConfig).toBe('function') - expect(typeof mod.listPrompts).toBe('function') +describe('cli shell entrypoint', () => { + it('re-exports the sdk surface without executing the CLI runtime', async () => { + expect(cliShell.runCli).toBe(runCli) + expect(cliShell.createDefaultPluginConfig).toBe(createDefaultPluginConfig) + expect(cliShell.listPrompts).toBe(listPrompts) }) }) diff --git a/cli/src/index.ts b/cli/src/index.ts index dffa366e..31428d56 100644 --- a/cli/src/index.ts +++ b/cli/src/index.ts @@ -3,22 +3,9 @@ import {existsSync, realpathSync} from 'node:fs' import process from 'node:process' import {fileURLToPath} from 'node:url' -import {runCli} from './cli-runtime' +import {runCli} from '@truenine/memory-sync-sdk' -export * from './Aindex' -export * from './cli-runtime' -export * from './config' -export * from './ConfigLoader' -export { - createDefaultPluginConfig -} from './plugin.config' -export * from './PluginPipeline' -export { - DEFAULT_USER_CONFIG, - PathPlaceholders -} from './plugins/plugin-core' - -export * from './prompts' +export * from '@truenine/memory-sync-sdk' function isCliEntrypoint(argv: readonly string[] = process.argv): boolean { const entryPath = argv[1] diff --git a/cli/src/inputs/AbstractInputCapability.ts b/cli/src/inputs/AbstractInputCapability.ts deleted file mode 100644 index 244400da..00000000 --- a/cli/src/inputs/AbstractInputCapability.ts +++ /dev/null @@ -1,186 +0,0 @@ -import type {ParsedMarkdown} from '@truenine/md-compiler/markdown' -import type { - InputCapability, - InputCapabilityContext, - InputCollectedContext, - InputEffectContext, - InputEffectHandler, - InputEffectRegistration, - InputEffectResult, - PluginOptions, - PluginScopeRegistration, - ResolvedBasePaths, - YAMLFrontMatter -} from '@/plugins/plugin-core' - -import {spawn} from 'node:child_process' -import * as path from 'node:path' -import {createLogger} from '@truenine/logger' -import {parseMarkdown} from '@truenine/md-compiler/markdown' -import {buildDiagnostic, diagnosticLines} from '@/diagnostics' -import {PathPlaceholders} from '@/plugins/plugin-core' -import {logProtectedDeletionGuardError, ProtectedDeletionGuardError} from '@/ProtectedDeletionGuard' -import {resolveUserPath} from '@/runtime-environment' - -export abstract class AbstractInputCapability implements InputCapability { - private readonly inputEffects: InputEffectRegistration[] = [] - - private readonly registeredScopes: PluginScopeRegistration[] = [] - - readonly name: string - - readonly dependsOn?: readonly string[] - - private _log?: import('@truenine/logger').ILogger - - get log(): import('@truenine/logger').ILogger { - this._log ??= createLogger(this.name) - return this._log - } - - protected constructor(name: string, dependsOn?: readonly string[]) { - this.name = name - if (dependsOn != null) this.dependsOn = dependsOn - } - - protected registerEffect(name: string, handler: InputEffectHandler, priority: number = 0): void { - this.inputEffects.push({name, handler, priority}) - this.inputEffects.sort((a, b) => (a.priority ?? 0) - (b.priority ?? 0)) // Sort by priority (lower = earlier) - } - - async executeEffects(ctx: InputCapabilityContext, dryRun: boolean = false): Promise { - const results: InputEffectResult[] = [] - - if (this.inputEffects.length === 0) return results - - const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) - - const effectCtx: InputEffectContext = { - logger: this.log, - fs: ctx.fs, - path: ctx.path, - glob: ctx.glob, - spawn, - userConfigOptions: ctx.userConfigOptions, - workspaceDir, - aindexDir, - dryRun - } - - for (const effect of this.inputEffects) { - if (dryRun) { - this.log.trace({action: 'dryRun', type: 'inputEffect', name: effect.name}) - results.push({success: true, description: `Would execute input effect: ${effect.name}`}) - continue - } - - try { - const result = await effect.handler(effectCtx) - if (result.success) { - this.log.trace({action: 'inputEffect', name: effect.name, status: 'success', description: result.description}) - if (result.modifiedFiles != null && result.modifiedFiles.length > 0) { - this.log.debug({action: 'inputEffect', name: effect.name, modifiedFileCount: result.modifiedFiles.length}) - } - if (result.deletedFiles != null && result.deletedFiles.length > 0) { - this.log.debug({action: 'inputEffect', name: effect.name, deletedFileCount: result.deletedFiles.length}) - } - } else { - const error = result.error ?? new Error(`Input effect failed: ${effect.name}`) - throw error - } - results.push(result) - } - catch (error) { - const effectError = error instanceof Error ? error : new Error(String(error)) - this.logInputEffectFailure(effect.name, effectError) - results.push({success: false, error: effectError, description: `Input effect failed: ${effect.name}`}) - throw effectError - } - } - - return results - } - - private logInputEffectFailure(effectName: string, error: Error): void { - if (error instanceof ProtectedDeletionGuardError) { - logProtectedDeletionGuardError(this.log, error.operation, error.violations) - return - } - - this.log.error(buildDiagnostic({ - code: 'INPUT_EFFECT_FAILED', - title: `Input effect failed: ${effectName}`, - rootCause: diagnosticLines( - `The input effect "${effectName}" failed before tnmsc could finish preprocessing.`, - `Underlying error: ${error.message}` - ), - exactFix: diagnosticLines( - 'Inspect the effect inputs and fix the failing file, path, or environment condition before retrying tnmsc.' - ), - possibleFixes: [ - diagnosticLines('Re-run the command after fixing the referenced path or generated artifact.'), - diagnosticLines('Add a focused regression test if this effect should handle the failure more gracefully.') - ], - details: { - effectName, - errorMessage: error.message - } - })) - } - - hasEffects(): boolean { - return this.inputEffects.length > 0 - } - - getEffectCount(): number { - return this.inputEffects.length - } - - protected registerScope(namespace: string, values: Record): void { - this.registeredScopes.push({namespace, values}) - this.log.debug({action: 'registerScope', namespace, keys: Object.keys(values)}) - } - - getRegisteredScopes(): readonly PluginScopeRegistration[] { - return this.registeredScopes - } - - protected clearRegisteredScopes(): void { - this.registeredScopes.length = 0 - this.log.debug({action: 'clearRegisteredScopes'}) - } - - abstract collect(ctx: InputCapabilityContext): Partial | Promise> - - protected resolveBasePaths(options: Required): ResolvedBasePaths { - const workspaceDirRaw = options.workspaceDir - const workspaceDir = this.resolvePath(workspaceDirRaw, '') - - const aindexDirName = options.aindex?.dir ?? 'aindex' // 从配置读取 aindex 目录名,默认为 'aindex' - const aindexDir = path.join(workspaceDir, aindexDirName) - - return {workspaceDir, aindexDir} - } - - protected resolvePath(rawPath: string, workspaceDir: string): string { - let resolved = rawPath - - if (resolved.includes(PathPlaceholders.WORKSPACE)) resolved = resolved.replace(PathPlaceholders.WORKSPACE, workspaceDir) - - if (resolved.startsWith(PathPlaceholders.USER_HOME)) return resolveUserPath(resolved) - - return path.normalize(resolveUserPath(resolved)) - } - - protected resolveAindexPath(relativePath: string, aindexDir: string): string { - return path.join(aindexDir, relativePath) - } - - protected readAndParseMarkdown( - filePath: string, - fs: typeof import('node:fs') - ): ParsedMarkdown { - const rawContent = fs.readFileSync(filePath, 'utf8') - return parseMarkdown(rawContent) - } -} diff --git a/cli/src/inputs/effect-md-cleanup.ts b/cli/src/inputs/effect-md-cleanup.ts deleted file mode 100644 index 02a02575..00000000 --- a/cli/src/inputs/effect-md-cleanup.ts +++ /dev/null @@ -1,166 +0,0 @@ -import type { - InputCapabilityContext, - InputCollectedContext, - InputEffectContext, - InputEffectResult -} from '../plugins/plugin-core' -import {resolveAindexProjectSeriesConfigs} from '@/aindex-project-series' -import {buildFileOperationDiagnostic} from '@/diagnostics' -import {AbstractInputCapability} from '../plugins/plugin-core' - -export interface WhitespaceCleanupEffectResult extends InputEffectResult { - readonly modifiedFiles: string[] - readonly skippedFiles: string[] -} - -export class MarkdownWhitespaceCleanupEffectInputCapability extends AbstractInputCapability { - constructor() { - super('MarkdownWhitespaceCleanupEffectInputCapability') - this.registerEffect('markdown-whitespace-cleanup', this.cleanupWhitespace.bind(this), 30) - } - - private async cleanupWhitespace(ctx: InputEffectContext): Promise { - const {fs, path, aindexDir, dryRun, logger, userConfigOptions} = ctx - - const modifiedFiles: string[] = [] - const skippedFiles: string[] = [] - const errors: {path: string, error: Error}[] = [] - const projectSeriesDirs = resolveAindexProjectSeriesConfigs(userConfigOptions) - .map(series => path.join(aindexDir, series.src)) - - const dirsToScan = [ - path.join(aindexDir, 'src'), - ...projectSeriesDirs, - path.join(aindexDir, 'dist') - ] - - for (const dir of dirsToScan) { - if (!fs.existsSync(dir)) { - logger.debug({action: 'whitespace-cleanup', message: 'Directory does not exist, skipping', dir}) - continue - } - - this.processDirectory(ctx, dir, modifiedFiles, skippedFiles, errors, dryRun ?? false) - } - - const hasErrors = errors.length > 0 - - return { - success: !hasErrors, - description: dryRun - ? `Would modify ${modifiedFiles.length} files, skip ${skippedFiles.length} files` - : `Modified ${modifiedFiles.length} files, skipped ${skippedFiles.length} files`, - modifiedFiles, - skippedFiles, - ...hasErrors && {error: new Error(`${errors.length} errors occurred during cleanup`)} - } - } - - private processDirectory( - ctx: InputEffectContext, - dir: string, - modifiedFiles: string[], - skippedFiles: string[], - errors: {path: string, error: Error}[], - dryRun: boolean - ): void { - const {fs, path, logger} = ctx - - let entries: import('node:fs').Dirent[] - try { - entries = fs.readdirSync(dir, {withFileTypes: true}) - } - catch (error) { - errors.push({path: dir, error: error as Error}) - logger.warn(buildFileOperationDiagnostic({ - code: 'WHITESPACE_CLEANUP_DIRECTORY_READ_FAILED', - title: 'Whitespace cleanup could not read a directory', - operation: 'read', - targetKind: 'cleanup directory', - path: dir, - error - })) - return - } - - for (const entry of entries) { - const entryPath = path.join(dir, entry.name) - - if (entry.isDirectory()) this.processDirectory(ctx, entryPath, modifiedFiles, skippedFiles, errors, dryRun) - else if (entry.isFile() && entry.name.endsWith('.md')) this.processMarkdownFile(ctx, entryPath, modifiedFiles, skippedFiles, errors, dryRun) - } - } - - private processMarkdownFile( - ctx: InputEffectContext, - filePath: string, - modifiedFiles: string[], - skippedFiles: string[], - errors: {path: string, error: Error}[], - dryRun: boolean - ): void { - const {fs, logger} = ctx - - try { - const originalContent = fs.readFileSync(filePath, 'utf8') - const cleanedContent = this.cleanMarkdownContent(originalContent) - - if (originalContent === cleanedContent) { - skippedFiles.push(filePath) - logger.debug({action: 'whitespace-cleanup', skipped: filePath, reason: 'no changes needed'}) - return - } - - if (dryRun) { - logger.debug({action: 'whitespace-cleanup', dryRun: true, wouldModify: filePath}) - modifiedFiles.push(filePath) - } else { - fs.writeFileSync(filePath, cleanedContent, 'utf8') - modifiedFiles.push(filePath) - logger.debug({action: 'whitespace-cleanup', modified: filePath}) - } - } - catch (error) { - errors.push({path: filePath, error: error as Error}) - logger.warn(buildFileOperationDiagnostic({ - code: 'WHITESPACE_CLEANUP_FILE_PROCESS_FAILED', - title: 'Whitespace cleanup could not process a markdown file', - operation: 'process', - targetKind: 'markdown file', - path: filePath, - error - })) - } - } - - cleanMarkdownContent(content: string): string { - const lineEnding = this.detectLineEnding(content) - const lines = content.split(/\r?\n/) - const trimmedLines = lines.map(line => line.replace(/[ \t]+$/, '')) - - const result: string[] = [] - let consecutiveBlankCount = 0 - - for (const line of trimmedLines) { - if (line === '') { - consecutiveBlankCount++ - if (consecutiveBlankCount <= 2) result.push(line) - } else { - consecutiveBlankCount = 0 - result.push(line) - } - } - - return result.join(lineEnding) - } - - detectLineEnding(content: string): '\r\n' | '\n' { - if (content.includes('\r\n')) return '\r\n' - return '\n' - } - - collect(ctx: InputCapabilityContext): Partial { - void ctx - return {} - } -} diff --git a/cli/src/inputs/effect-orphan-cleanup.test.ts b/cli/src/inputs/effect-orphan-cleanup.test.ts deleted file mode 100644 index 4e79454f..00000000 --- a/cli/src/inputs/effect-orphan-cleanup.test.ts +++ /dev/null @@ -1,249 +0,0 @@ -import type {InputCapabilityContext} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger} from '../plugins/plugin-core' -import {OrphanFileCleanupEffectInputCapability} from './effect-orphan-cleanup' - -const legacySourceExtension = '.cn.mdx' - -function createContext(tempWorkspace: string): InputCapabilityContext { - const options = mergeConfig({workspaceDir: tempWorkspace}) - - return { - logger: createLogger('OrphanFileCleanupEffectInputCapabilityTest', 'error'), - fs, - path, - glob, - userConfigOptions: options, - dependencyContext: {} - } as InputCapabilityContext -} - -class TestOrphanFileCleanupEffectInputCapability extends OrphanFileCleanupEffectInputCapability { - constructor(private readonly planFactory: (ctx: ReturnType & {readonly fs: typeof fs, readonly path: typeof path}) => { - filesToDelete: string[] - dirsToDelete: string[] - errors: {path: string, error: Error}[] - }) { - super() - } - - protected override buildDeletionPlan(ctx: Parameters[0]): { - filesToDelete: string[] - dirsToDelete: string[] - errors: {path: string, error: Error}[] - } { - const basePaths = this.resolveBasePaths(ctx.userConfigOptions) - return this.planFactory({...basePaths, fs: ctx.fs, path: ctx.path}) - } -} - -describe('orphan file cleanup effect', () => { - it('keeps dist command files when a matching .src.mdx source exists', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-test-')) - const srcDir = path.join(tempWorkspace, 'aindex', 'commands') - const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'commands') - const distFile = path.join(distDir, 'demo.mdx') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.mkdirSync(distDir, {recursive: true}) - fs.writeFileSync(path.join(srcDir, 'demo.src.mdx'), '---\ndescription: source\n---\nSource prompt', 'utf8') - fs.writeFileSync(distFile, 'Compiled prompt', 'utf8') - - const plugin = new OrphanFileCleanupEffectInputCapability() - const [result] = await plugin.executeEffects(createContext(tempWorkspace)) - - expect(result?.success).toBe(true) - expect(fs.existsSync(distFile)).toBe(true) - expect(result?.deletedFiles ?? []).toHaveLength(0) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('deletes dist command mdx files when only a legacy cn source remains', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-legacy-test-')) - const srcDir = path.join(tempWorkspace, 'aindex', 'commands') - const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'commands') - const distFile = path.join(distDir, 'demo.mdx') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.mkdirSync(distDir, {recursive: true}) - fs.writeFileSync(path.join(srcDir, `demo${legacySourceExtension}`), '---\ndescription: legacy\n---\nLegacy prompt', 'utf8') - fs.writeFileSync(distFile, 'Compiled prompt', 'utf8') - - const plugin = new OrphanFileCleanupEffectInputCapability() - const [result] = await plugin.executeEffects(createContext(tempWorkspace)) - - expect(result?.success).toBe(true) - expect(fs.existsSync(distFile)).toBe(false) - expect(result?.deletedDirs ?? []).toContain(path.join(tempWorkspace, 'aindex', 'dist', 'commands')) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('fails when an orphan cleanup candidate hits an exact protected path', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-guard-exact-')) - const safeDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'commands', 'safe.mdx') - const globalConfigPath = path.join(os.homedir(), '.aindex', '.tnmsc.json') - - try { - fs.mkdirSync(path.dirname(safeDistFile), {recursive: true}) - fs.writeFileSync(safeDistFile, 'Compiled prompt', 'utf8') - - const plugin = new TestOrphanFileCleanupEffectInputCapability(() => ({ - filesToDelete: [safeDistFile, globalConfigPath], - dirsToDelete: [], - errors: [] - })) - - await expect(plugin.executeEffects(createContext(tempWorkspace))).rejects.toThrow('Protected deletion guard blocked orphan-file-cleanup') - expect(fs.existsSync(safeDistFile)).toBe(true) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('fails without partial deletion when safe and subtree-protected candidates are mixed', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-guard-subtree-')) - const srcDir = path.join(tempWorkspace, 'aindex', 'commands') - const protectedSourceFile = path.join(srcDir, 'demo.src.mdx') - const safeDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'commands', 'safe.mdx') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.mkdirSync(path.dirname(safeDistFile), {recursive: true}) - fs.writeFileSync(protectedSourceFile, '---\ndescription: source\n---\nSource prompt', 'utf8') - fs.writeFileSync(safeDistFile, 'Compiled prompt', 'utf8') - - const plugin = new TestOrphanFileCleanupEffectInputCapability(() => ({ - filesToDelete: [safeDistFile, protectedSourceFile], - dirsToDelete: [], - errors: [] - })) - - await expect(plugin.executeEffects(createContext(tempWorkspace))).rejects.toThrow('Protected deletion guard blocked orphan-file-cleanup') - expect(fs.existsSync(safeDistFile)).toBe(true) - expect(fs.existsSync(protectedSourceFile)).toBe(true) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('collapses nested orphan directories to the highest removable subtree root', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-collapse-test-')) - const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'commands', 'legacy', 'deep') - const orphanFile = path.join(distDir, 'demo.txt') - - try { - fs.mkdirSync(distDir, {recursive: true}) - fs.writeFileSync(orphanFile, 'Compiled prompt', 'utf8') - - const plugin = new OrphanFileCleanupEffectInputCapability() - const [result] = await plugin.executeEffects(createContext(tempWorkspace)) - - expect(result?.success).toBe(true) - expect(result?.deletedFiles).toEqual([]) - expect(result?.deletedDirs).toEqual([path.join(tempWorkspace, 'aindex', 'dist', 'commands')]) - expect(fs.existsSync(path.join(tempWorkspace, 'aindex', 'dist', 'commands'))).toBe(false) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('cleans orphaned ext and arch dist files using matching series source roots', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-series-')) - const extSrcFile = path.join(tempWorkspace, 'aindex', 'ext', 'plugin-a', 'agt.src.mdx') - const extDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'ext', 'plugin-a', 'agt.mdx') - const archDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'arch', 'system-a', 'agt.mdx') - - try { - fs.mkdirSync(path.dirname(extSrcFile), {recursive: true}) - fs.mkdirSync(path.dirname(extDistFile), {recursive: true}) - fs.mkdirSync(path.dirname(archDistFile), {recursive: true}) - fs.writeFileSync(extSrcFile, '---\ndescription: ext\n---\nExt prompt', 'utf8') - fs.writeFileSync(extDistFile, 'Ext dist', 'utf8') - fs.writeFileSync(archDistFile, 'Arch dist', 'utf8') - - const plugin = new OrphanFileCleanupEffectInputCapability() - const [result] = await plugin.executeEffects(createContext(tempWorkspace)) - - expect(result?.success).toBe(true) - expect(fs.existsSync(extDistFile)).toBe(true) - expect(fs.existsSync(archDistFile)).toBe(false) - expect(result?.deletedDirs ?? []).toContain(path.join(tempWorkspace, 'aindex', 'dist', 'arch')) - expect(result?.deletedFiles ?? []).not.toContain(extDistFile) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('cleans orphaned softwares dist files using the matching software source root', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-softwares-')) - const softwareSrcFile = path.join(tempWorkspace, 'aindex', 'softwares', 'tool-a', 'agt.src.mdx') - const softwareDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'softwares', 'tool-a', 'agt.mdx') - const orphanSoftwareDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'softwares', 'tool-b', 'agt.mdx') - - try { - fs.mkdirSync(path.dirname(softwareSrcFile), {recursive: true}) - fs.mkdirSync(path.dirname(softwareDistFile), {recursive: true}) - fs.mkdirSync(path.dirname(orphanSoftwareDistFile), {recursive: true}) - fs.writeFileSync(softwareSrcFile, '---\ndescription: software\n---\nSoftware prompt', 'utf8') - fs.writeFileSync(softwareDistFile, 'Software dist', 'utf8') - fs.writeFileSync(orphanSoftwareDistFile, 'Orphan software dist', 'utf8') - - const plugin = new OrphanFileCleanupEffectInputCapability() - const [result] = await plugin.executeEffects(createContext(tempWorkspace)) - - expect(result?.success).toBe(true) - expect(fs.existsSync(softwareDistFile)).toBe(true) - expect(fs.existsSync(orphanSoftwareDistFile)).toBe(false) - expect(result?.deletedDirs ?? []).toContain(path.dirname(orphanSoftwareDistFile)) - expect(result?.deletedFiles ?? []).not.toContain(softwareDistFile) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('cleans orphaned subagent dist files using the configured subagents source root', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-subagents-')) - const subAgentSrcFile = path.join(tempWorkspace, 'aindex', 'subagents', 'qa', 'boot.src.mdx') - const subAgentDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'subagents', 'qa', 'boot.mdx') - const orphanSubAgentDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'subagents', 'ops', 'boot.mdx') - - try { - fs.mkdirSync(path.dirname(subAgentSrcFile), {recursive: true}) - fs.mkdirSync(path.dirname(subAgentDistFile), {recursive: true}) - fs.mkdirSync(path.dirname(orphanSubAgentDistFile), {recursive: true}) - fs.writeFileSync(subAgentSrcFile, '---\ndescription: subagent\n---\nSubagent prompt', 'utf8') - fs.writeFileSync(subAgentDistFile, 'Subagent dist', 'utf8') - fs.writeFileSync(orphanSubAgentDistFile, 'Orphan subagent dist', 'utf8') - - const plugin = new OrphanFileCleanupEffectInputCapability() - const [result] = await plugin.executeEffects(createContext(tempWorkspace)) - - expect(result?.success).toBe(true) - expect(fs.existsSync(subAgentDistFile)).toBe(true) - expect(fs.existsSync(orphanSubAgentDistFile)).toBe(false) - expect(result?.deletedDirs ?? []).toContain(path.dirname(orphanSubAgentDistFile)) - expect(result?.deletedFiles ?? []).not.toContain(subAgentDistFile) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/effect-orphan-cleanup.ts b/cli/src/inputs/effect-orphan-cleanup.ts deleted file mode 100644 index a0356d58..00000000 --- a/cli/src/inputs/effect-orphan-cleanup.ts +++ /dev/null @@ -1,308 +0,0 @@ -import type { - AindexPromptTreeDirectoryPairKey, - InputCapabilityContext, - InputCollectedContext, - InputEffectContext, - InputEffectResult, - PluginOptions -} from '../plugins/plugin-core' -import {buildFileOperationDiagnostic} from '@/diagnostics' -import {compactDeletionTargets} from '../cleanup/delete-targets' -import {deleteTargets} from '../core/desk-paths' -import { - AbstractInputCapability, - AINDEX_PROMPT_TREE_DIRECTORY_PAIR_KEYS, - SourcePromptFileExtensions -} from '../plugins/plugin-core' -import { - collectConfiguredAindexInputRules, - createProtectedDeletionGuard, - partitionDeletionTargets, - ProtectedDeletionGuardError -} from '../ProtectedDeletionGuard' - -export interface OrphanCleanupEffectResult extends InputEffectResult { - readonly deletedFiles: string[] - readonly deletedDirs: string[] -} - -interface OrphanCleanupDirectoryConfig { - readonly key: AindexPromptTreeDirectoryPairKey - readonly srcPath: string - readonly distPath: string -} - -interface OrphanCleanupPlan { - readonly filesToDelete: string[] - readonly dirsToDelete: string[] - readonly errors: readonly {path: string, error: Error}[] -} - -export class OrphanFileCleanupEffectInputCapability extends AbstractInputCapability { - constructor() { - super('OrphanFileCleanupEffectInputCapability') - this.registerEffect('orphan-file-cleanup', this.cleanupOrphanFiles.bind(this), 20) - } - - protected buildProtectedDeletionGuard(ctx: InputEffectContext): ReturnType { - return createProtectedDeletionGuard({ - workspaceDir: ctx.workspaceDir, - aindexDir: ctx.aindexDir, - includeReservedWorkspaceContentRoots: false, - rules: [ - ...collectConfiguredAindexInputRules(ctx.userConfigOptions, ctx.aindexDir, { - workspaceDir: ctx.workspaceDir - }), - ...(ctx.userConfigOptions.cleanupProtection?.rules ?? []).map(rule => ({ - path: rule.path, - protectionMode: rule.protectionMode, - reason: rule.reason ?? 'configured cleanup protection rule', - source: 'configured-cleanup-protection', - matcher: rule.matcher ?? 'path' - })) - ] - }) - } - - protected buildDeletionPlan( - ctx: InputEffectContext, - directoryConfigs: readonly OrphanCleanupDirectoryConfig[] - ): OrphanCleanupPlan { - const filesToDelete: string[] = [] - const dirsToDelete: string[] = [] - const errors: {path: string, error: Error}[] = [] - - for (const directoryConfig of directoryConfigs) { - const distSubDirPath = ctx.path.join(ctx.aindexDir, directoryConfig.distPath) - if (!ctx.fs.existsSync(distSubDirPath)) continue - if (!ctx.fs.statSync(distSubDirPath).isDirectory()) continue - const subDirWillBeEmpty = this.collectDirectoryPlan(ctx, distSubDirPath, directoryConfig, filesToDelete, dirsToDelete, errors) - if (subDirWillBeEmpty) dirsToDelete.push(distSubDirPath) - } - - return {filesToDelete, dirsToDelete, errors} - } - - protected resolveDirectoryConfigs(options: Required): readonly OrphanCleanupDirectoryConfig[] { - return AINDEX_PROMPT_TREE_DIRECTORY_PAIR_KEYS.map(key => ({ - key, - srcPath: options.aindex[key].src, - distPath: options.aindex[key].dist - })) - } - - private async cleanupOrphanFiles(ctx: InputEffectContext): Promise { - const {fs, path, aindexDir, logger, userConfigOptions, dryRun} = ctx - const distDir = path.join(aindexDir, 'dist') - - if (!fs.existsSync(distDir)) { - logger.debug({action: 'orphan-cleanup', message: 'dist/ directory does not exist, skipping', distDir}) - return { - success: true, - description: 'dist/ directory does not exist, nothing to clean', - deletedFiles: [], - deletedDirs: [] - } - } - - const plan = this.buildDeletionPlan(ctx, this.resolveDirectoryConfigs(userConfigOptions)) - - const guard = this.buildProtectedDeletionGuard(ctx) - const filePartition = partitionDeletionTargets(plan.filesToDelete, guard) - const dirPartition = partitionDeletionTargets(plan.dirsToDelete, guard) - const compactedPlan = compactDeletionTargets(filePartition.safePaths, dirPartition.safePaths) - const violations = [...filePartition.violations, ...dirPartition.violations].sort((a, b) => a.targetPath.localeCompare(b.targetPath)) - - if (violations.length > 0) { - return { - success: false, - description: `Protected deletion guard blocked orphan cleanup for ${violations.length} path(s)`, - deletedFiles: [], - deletedDirs: [], - error: new ProtectedDeletionGuardError('orphan-file-cleanup', violations) - } - } - - if (dryRun) { - return { - success: true, - description: `Would delete ${compactedPlan.files.length} files and ${compactedPlan.dirs.length} directories`, - deletedFiles: [...compactedPlan.files], - deletedDirs: [...compactedPlan.dirs] - } - } - - const deleteErrors: {path: string, error: Error}[] = [...plan.errors] - logger.debug('orphan cleanup delete execution started', { - filesToDelete: compactedPlan.files.length, - dirsToDelete: compactedPlan.dirs.length - }) - - const result = await deleteTargets({ - files: compactedPlan.files, - dirs: compactedPlan.dirs - }) - - for (const fileError of result.fileErrors) { - const normalizedError = fileError.error instanceof Error ? fileError.error : new Error(String(fileError.error)) - deleteErrors.push({path: fileError.path, error: normalizedError}) - logger.warn(buildFileOperationDiagnostic({ - code: 'ORPHAN_CLEANUP_FILE_DELETE_FAILED', - title: 'Orphan cleanup could not delete a file', - operation: 'delete', - targetKind: 'orphan file', - path: fileError.path, - error: normalizedError - })) - } - - for (const dirError of result.dirErrors) { - const normalizedError = dirError.error instanceof Error ? dirError.error : new Error(String(dirError.error)) - deleteErrors.push({path: dirError.path, error: normalizedError}) - logger.warn(buildFileOperationDiagnostic({ - code: 'ORPHAN_CLEANUP_DIRECTORY_DELETE_FAILED', - title: 'Orphan cleanup could not delete a directory', - operation: 'delete', - targetKind: 'orphan directory', - path: dirError.path, - error: normalizedError - })) - } - - logger.debug('orphan cleanup delete execution complete', { - deletedFiles: result.deletedFiles.length, - deletedDirs: result.deletedDirs.length, - errors: deleteErrors.length - }) - - const hasErrors = deleteErrors.length > 0 - return { - success: !hasErrors, - description: `Deleted ${result.deletedFiles.length} files and ${result.deletedDirs.length} directories`, - deletedFiles: [...result.deletedFiles], - deletedDirs: [...result.deletedDirs], - ...hasErrors && {error: new Error(`${deleteErrors.length} errors occurred during cleanup`)} - } - } - - protected collectDirectoryPlan( - ctx: InputEffectContext, - distDirPath: string, - directoryConfig: OrphanCleanupDirectoryConfig, - filesToDelete: string[], - dirsToDelete: string[], - errors: {path: string, error: Error}[] - ): boolean { - const {fs, path, aindexDir, logger} = ctx - - let entries: import('node:fs').Dirent[] - try { - entries = fs.readdirSync(distDirPath, {withFileTypes: true}) - } - catch (error) { - errors.push({path: distDirPath, error: error as Error}) - logger.warn(buildFileOperationDiagnostic({ - code: 'ORPHAN_CLEANUP_DIRECTORY_READ_FAILED', - title: 'Orphan cleanup could not read a directory', - operation: 'read', - targetKind: 'dist cleanup directory', - path: distDirPath, - error - })) - return false - } - - let hasRetainedEntries = false - - for (const entry of entries) { - const entryPath = path.join(distDirPath, entry.name) - - if (entry.isDirectory()) { - const childWillBeEmpty = this.collectDirectoryPlan( - ctx, - entryPath, - directoryConfig, - filesToDelete, - dirsToDelete, - errors - ) - if (childWillBeEmpty) dirsToDelete.push(entryPath) - else hasRetainedEntries = true - continue - } - - if (!entry.isFile()) { - hasRetainedEntries = true - continue - } - - const isOrphan = this.isOrphanFile(ctx, entryPath, directoryConfig, aindexDir) - if (isOrphan) filesToDelete.push(entryPath) - else hasRetainedEntries = true - } - - return !hasRetainedEntries - } - - private isOrphanFile( - ctx: InputEffectContext, - distFilePath: string, - directoryConfig: OrphanCleanupDirectoryConfig, - aindexDir: string - ): boolean { - const {fs, path} = ctx - - const fileName = path.basename(distFilePath) - const isMdxFile = fileName.endsWith('.mdx') - - const distTypeDir = path.join(aindexDir, directoryConfig.distPath) - const relativeFromType = path.relative(distTypeDir, distFilePath) - const relativeDir = path.dirname(relativeFromType) - const baseName = fileName.replace(/\.mdx$/, '') - - if (!isMdxFile) return !fs.existsSync(path.join(aindexDir, directoryConfig.srcPath, relativeFromType)) - - const possibleSrcPaths = this.getPossibleSourcePaths( - path, - aindexDir, - directoryConfig.key, - directoryConfig.srcPath, - baseName, - relativeDir - ) - return !possibleSrcPaths.some(candidatePath => fs.existsSync(candidatePath)) - } - - private getPossibleSourcePaths( - nodePath: typeof import('node:path'), - aindexDir: string, - directoryKey: AindexPromptTreeDirectoryPairKey, - srcPath: string, - baseName: string, - relativeDir: string - ): string[] { - if (directoryKey === 'skills') { - const skillParts = relativeDir === '.' ? [baseName] : relativeDir.split(nodePath.sep) - const skillName = skillParts[0] ?? baseName - const remainingPath = relativeDir === '.' ? '' : relativeDir.slice(skillName.length + 1) - - if (remainingPath !== '') { - return SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, skillName, remainingPath, `${baseName}${extension}`)) - } - - return [ - ...SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, skillName, `SKILL${extension}`)), - ...SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, skillName, `skill${extension}`)) - ] - } - - return relativeDir === '.' - ? SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, `${baseName}${extension}`)) - : SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, relativeDir, `${baseName}${extension}`)) - } - - collect(ctx: InputCapabilityContext): Partial { - void ctx - return {} - } -} diff --git a/cli/src/inputs/effect-skill-sync.test.ts b/cli/src/inputs/effect-skill-sync.test.ts deleted file mode 100644 index 2bc9ab11..00000000 --- a/cli/src/inputs/effect-skill-sync.test.ts +++ /dev/null @@ -1,115 +0,0 @@ -import type {InputCapabilityContext, PluginOptions} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger} from '../plugins/plugin-core' -import {SkillDistCleanupEffectInputCapability} from './effect-skill-sync' - -function createContext( - tempWorkspace: string, - overrides?: Partial -): InputCapabilityContext { - const options = mergeConfig({workspaceDir: tempWorkspace}, overrides ?? {}) - - return { - logger: createLogger('SkillDistCleanupEffectInputCapabilityTest', 'error'), - fs, - path, - glob, - userConfigOptions: options, - dependencyContext: {} - } as InputCapabilityContext -} - -describe('skill dist cleanup effect', () => { - it('deletes non-mdx mirrored files while preserving compiled mdx files', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-dist-cleanup-test-')) - const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') - const nestedLegacyDir = path.join(distSkillDir, 'legacy') - - try { - fs.mkdirSync(nestedLegacyDir, {recursive: true}) - fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), 'Compiled skill', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'guide.mdx'), 'Compiled guide', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'guide.src.mdx'), 'Stale source mirror', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'notes.md'), 'Legacy note', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'demo.kts'), 'println("legacy")', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'mcp.json'), '{"mcpServers":{}}', 'utf8') - fs.writeFileSync(path.join(nestedLegacyDir, 'diagram.svg'), '', 'utf8') - - const plugin = new SkillDistCleanupEffectInputCapability() - const [result] = await plugin.executeEffects(createContext(tempWorkspace)) - - expect(result?.success).toBe(true) - expect(fs.existsSync(path.join(distSkillDir, 'skill.mdx'))).toBe(true) - expect(fs.existsSync(path.join(distSkillDir, 'guide.mdx'))).toBe(true) - expect(fs.existsSync(path.join(distSkillDir, 'guide.src.mdx'))).toBe(false) - expect(fs.existsSync(path.join(distSkillDir, 'notes.md'))).toBe(false) - expect(fs.existsSync(path.join(distSkillDir, 'demo.kts'))).toBe(false) - expect(fs.existsSync(path.join(distSkillDir, 'mcp.json'))).toBe(false) - expect(fs.existsSync(path.join(nestedLegacyDir, 'diagram.svg'))).toBe(false) - expect(fs.existsSync(nestedLegacyDir)).toBe(false) - expect(result?.deletedFiles).toEqual(expect.arrayContaining([ - path.join(distSkillDir, 'guide.src.mdx'), - path.join(distSkillDir, 'notes.md'), - path.join(distSkillDir, 'demo.kts'), - path.join(distSkillDir, 'mcp.json') - ])) - expect(result?.deletedDirs).toContain(nestedLegacyDir) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('respects configured skills dist paths instead of hardcoded defaults', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-dist-cleanup-config-test-')) - const distSkillDir = path.join(tempWorkspace, 'aindex', 'compiled', 'skills', 'demo') - - try { - fs.mkdirSync(distSkillDir, {recursive: true}) - fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), 'Compiled skill', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'legacy.txt'), 'Legacy attachment', 'utf8') - - const plugin = new SkillDistCleanupEffectInputCapability() - const [result] = await plugin.executeEffects(createContext(tempWorkspace, { - aindex: { - skills: {src: 'abilities', dist: 'compiled/skills'} - } - })) - - expect(result?.success).toBe(true) - expect(fs.existsSync(path.join(distSkillDir, 'skill.mdx'))).toBe(true) - expect(fs.existsSync(path.join(distSkillDir, 'legacy.txt'))).toBe(false) - expect(result?.deletedFiles).toContain(path.join(distSkillDir, 'legacy.txt')) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('collapses nested removable skill dist directories to the highest safe root', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-dist-cleanup-collapse-test-')) - const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') - const nestedLegacyDir = path.join(distSkillDir, 'legacy', 'deep') - - try { - fs.mkdirSync(nestedLegacyDir, {recursive: true}) - fs.writeFileSync(path.join(nestedLegacyDir, 'diagram.svg'), '', 'utf8') - - const plugin = new SkillDistCleanupEffectInputCapability() - const [result] = await plugin.executeEffects(createContext(tempWorkspace)) - - expect(result?.success).toBe(true) - expect(result?.deletedFiles).toEqual([]) - expect(result?.deletedDirs).toEqual([path.join(tempWorkspace, 'aindex', 'dist', 'skills')]) - expect(fs.existsSync(path.join(tempWorkspace, 'aindex', 'dist', 'skills'))).toBe(false) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/effect-skill-sync.ts b/cli/src/inputs/effect-skill-sync.ts deleted file mode 100644 index 61c2d89b..00000000 --- a/cli/src/inputs/effect-skill-sync.ts +++ /dev/null @@ -1,181 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext, InputEffectContext, InputEffectResult} from '../plugins/plugin-core' -import {buildFileOperationDiagnostic} from '@/diagnostics' -import {compactDeletionTargets} from '../cleanup/delete-targets' -import {deleteTargets} from '../core/desk-paths' -import {AbstractInputCapability, hasSourcePromptExtension} from '../plugins/plugin-core' - -export interface SkillDistCleanupEffectResult extends InputEffectResult { - readonly deletedFiles: string[] - readonly deletedDirs: string[] -} - -interface SkillDistCleanupPlan { - readonly filesToDelete: string[] - readonly dirsToDelete: string[] - readonly errors: readonly {path: string, error: Error}[] -} - -export class SkillDistCleanupEffectInputCapability extends AbstractInputCapability { - constructor() { - super('SkillDistCleanupEffectInputCapability') - this.registerEffect('skill-dist-cleanup', this.cleanupDistSkillArtifacts.bind(this), 10) - } - - private async cleanupDistSkillArtifacts(ctx: InputEffectContext): Promise { - const {fs, logger, userConfigOptions, aindexDir, dryRun} = ctx - const srcSkillsDir = this.resolveAindexPath(userConfigOptions.aindex.skills.src, aindexDir) - const distSkillsDir = this.resolveAindexPath(userConfigOptions.aindex.skills.dist, aindexDir) - - if (!fs.existsSync(distSkillsDir)) { - logger.debug({action: 'skill-dist-cleanup', message: 'dist skills directory does not exist, skipping', srcSkillsDir, distSkillsDir}) - return { - success: true, - description: 'dist skills directory does not exist, nothing to clean', - deletedFiles: [], - deletedDirs: [] - } - } - - const plan = this.buildCleanupPlan(ctx, distSkillsDir) - const compactedPlan = compactDeletionTargets(plan.filesToDelete, plan.dirsToDelete) - - if (dryRun) { - return { - success: true, - description: `Would delete ${compactedPlan.files.length} files and ${compactedPlan.dirs.length} directories`, - deletedFiles: [...compactedPlan.files], - deletedDirs: [...compactedPlan.dirs] - } - } - - const deleteErrors: {path: string, error: Error}[] = [...plan.errors] - logger.debug('skill dist cleanup delete execution started', { - filesToDelete: compactedPlan.files.length, - dirsToDelete: compactedPlan.dirs.length - }) - - const result = await deleteTargets({ - files: compactedPlan.files, - dirs: compactedPlan.dirs - }) - - for (const fileError of result.fileErrors) { - const normalizedError = fileError.error instanceof Error ? fileError.error : new Error(String(fileError.error)) - deleteErrors.push({path: fileError.path, error: normalizedError}) - logger.warn(buildFileOperationDiagnostic({ - code: 'SKILL_DIST_CLEANUP_FILE_DELETE_FAILED', - title: 'Skill dist cleanup could not delete a file', - operation: 'delete', - targetKind: 'skill dist file', - path: fileError.path, - error: normalizedError - })) - } - - for (const dirError of result.dirErrors) { - const normalizedError = dirError.error instanceof Error ? dirError.error : new Error(String(dirError.error)) - deleteErrors.push({path: dirError.path, error: normalizedError}) - logger.warn(buildFileOperationDiagnostic({ - code: 'SKILL_DIST_CLEANUP_DIRECTORY_DELETE_FAILED', - title: 'Skill dist cleanup could not delete a directory', - operation: 'delete', - targetKind: 'skill dist directory', - path: dirError.path, - error: normalizedError - })) - } - - logger.debug('skill dist cleanup delete execution complete', { - deletedFiles: result.deletedFiles.length, - deletedDirs: result.deletedDirs.length, - errors: deleteErrors.length - }) - - const hasErrors = deleteErrors.length > 0 - return { - success: !hasErrors, - description: `Deleted ${result.deletedFiles.length} files and ${result.deletedDirs.length} directories`, - deletedFiles: [...result.deletedFiles], - deletedDirs: [...result.deletedDirs], - ...hasErrors && {error: new Error(`${deleteErrors.length} errors occurred during cleanup`)} - } - } - - private buildCleanupPlan(ctx: InputEffectContext, distSkillsDir: string): SkillDistCleanupPlan { - const filesToDelete: string[] = [] - const dirsToDelete: string[] = [] - const errors: {path: string, error: Error}[] = [] - - const rootWillBeEmpty = this.collectCleanupPlan(ctx, distSkillsDir, filesToDelete, dirsToDelete, errors) - if (rootWillBeEmpty) dirsToDelete.push(distSkillsDir) - - return {filesToDelete, dirsToDelete, errors} - } - - private collectCleanupPlan( - ctx: InputEffectContext, - currentDir: string, - filesToDelete: string[], - dirsToDelete: string[], - errors: {path: string, error: Error}[] - ): boolean { - const {fs, path, logger} = ctx - - let entries: import('node:fs').Dirent[] - try { - entries = fs.readdirSync(currentDir, {withFileTypes: true}) - } - catch (error) { - errors.push({path: currentDir, error: error as Error}) - logger.warn(buildFileOperationDiagnostic({ - code: 'SKILL_DIST_CLEANUP_DIRECTORY_READ_FAILED', - title: 'Skill dist cleanup could not read a directory', - operation: 'read', - targetKind: 'skill dist directory', - path: currentDir, - error - })) - return false - } - - let hasRetainedEntries = false - - for (const entry of entries) { - const entryPath = path.join(currentDir, entry.name) - - if (entry.isDirectory()) { - const childWillBeEmpty = this.collectCleanupPlan(ctx, entryPath, filesToDelete, dirsToDelete, errors) - if (childWillBeEmpty) dirsToDelete.push(entryPath) - else hasRetainedEntries = true - continue - } - - if (!entry.isFile()) { - hasRetainedEntries = true - continue - } - - if (this.shouldRetainCompiledSkillFile(entry.name)) { - hasRetainedEntries = true - continue - } - - filesToDelete.push(entryPath) - } - - return !hasRetainedEntries - } - - private shouldRetainCompiledSkillFile(fileName: string): boolean { - return fileName.endsWith('.mdx') && !hasSourcePromptExtension(fileName) - } - - collect(ctx: InputCapabilityContext): Partial { - void ctx - return {} - } -} - -export type SkillSyncEffectResult = SkillDistCleanupEffectResult - -export class SkillNonSrcFileSyncEffectInputCapability extends SkillDistCleanupEffectInputCapability {} diff --git a/cli/src/inputs/index.ts b/cli/src/inputs/index.ts deleted file mode 100644 index c16fe985..00000000 --- a/cli/src/inputs/index.ts +++ /dev/null @@ -1,59 +0,0 @@ -export { - MarkdownWhitespaceCleanupEffectInputCapability -} from './effect-md-cleanup' -export { - OrphanFileCleanupEffectInputCapability -} from './effect-orphan-cleanup' -export { - SkillDistCleanupEffectInputCapability, - SkillNonSrcFileSyncEffectInputCapability -} from './effect-skill-sync' // Effect Input Plugins (按优先级排序: 10, 20, 30) - -export { - SkillInputCapability -} from './input-agentskills' -export { - AindexInputCapability -} from './input-aindex' -export { - CommandInputCapability -} from './input-command' -export { - EditorConfigInputCapability -} from './input-editorconfig' -export { - GitExcludeInputCapability -} from './input-git-exclude' -export { - GitIgnoreInputCapability -} from './input-gitignore' -export { - GlobalMemoryInputCapability -} from './input-global-memory' -export { - JetBrainsConfigInputCapability -} from './input-jetbrains-config' -export { - ProjectPromptInputCapability -} from './input-project-prompt' -export { - ReadmeMdInputCapability -} from './input-readme' -export { - RuleInputCapability -} from './input-rule' -export { - AIAgentIgnoreInputCapability -} from './input-shared-ignore' -export { - SubAgentInputCapability -} from './input-subagent' -export { - VSCodeConfigInputCapability -} from './input-vscode-config' -export { - WorkspaceInputCapability -} from './input-workspace' // Regular Input Plugins -export { - ZedConfigInputCapability -} from './input-zed-config' diff --git a/cli/src/inputs/input-agentskills-export-fallback.test.ts b/cli/src/inputs/input-agentskills-export-fallback.test.ts deleted file mode 100644 index b19ace0d..00000000 --- a/cli/src/inputs/input-agentskills-export-fallback.test.ts +++ /dev/null @@ -1,80 +0,0 @@ -import type {ILogger, InputCapabilityContext} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {afterEach, describe, expect, it, vi} from 'vitest' -import {mergeConfig} from '../config' - -vi.mock('@truenine/md-compiler', () => ({ - mdxToMd: async (content: string) => ({ - content: content.replace(/export default\s*\{[\s\S]*?\}\s*/u, '').trim(), - metadata: { - fields: {}, - source: 'export' - } - }) -})) - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createContext(tempWorkspace: string, logger: ILogger): InputCapabilityContext { - const options = mergeConfig({workspaceDir: tempWorkspace}) - - return { - logger, - fs, - path, - glob, - userConfigOptions: options, - dependencyContext: {} - } as InputCapabilityContext -} - -afterEach(() => vi.resetModules()) - -describe('skill input plugin export fallback', () => { - it('uses export-default metadata when compiled metadata fields are empty', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-export-fallback-')) - const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') - const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') - - try { - fs.mkdirSync(srcSkillDir, {recursive: true}) - fs.mkdirSync(distSkillDir, {recursive: true}) - fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), `export default { - description: 'source export description', -} - -Source skill -`, 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), `export default { - description: 'dist export description', -} - -Dist skill -`, 'utf8') - - const {SkillInputCapability} = await import('./input-agentskills') - const plugin = new SkillInputCapability() - const result = await plugin.collect(createContext(tempWorkspace, createMockLogger())) - const [skill] = result.skills ?? [] - - expect(result.skills?.length ?? 0).toBe(1) - expect(skill?.yamlFrontMatter?.description).toBe('dist export description') - expect(skill?.content).toContain('Dist skill') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/input-agentskills-types.ts b/cli/src/inputs/input-agentskills-types.ts deleted file mode 100644 index 24483bab..00000000 --- a/cli/src/inputs/input-agentskills-types.ts +++ /dev/null @@ -1,10 +0,0 @@ -/** - * Types for SkillInputCapability resource processing - */ - -import type {SkillChildDoc, SkillResource} from '../plugins/plugin-core' - -export interface ResourceScanResult { - readonly childDocs: SkillChildDoc[] - readonly resources: SkillResource[] -} diff --git a/cli/src/inputs/input-agentskills.test.ts b/cli/src/inputs/input-agentskills.test.ts deleted file mode 100644 index f5881e15..00000000 --- a/cli/src/inputs/input-agentskills.test.ts +++ /dev/null @@ -1,179 +0,0 @@ -import type {ILogger, InputCapabilityContext} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {SkillInputCapability} from './input-agentskills' - -function createMockLogger(warnings: string[] = [], errors: string[] = []): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: diagnostic => warnings.push(diagnostic.code), - error: diagnostic => errors.push(diagnostic.code), - fatal: diagnostic => errors.push(diagnostic.code) - } as ILogger -} - -function createContext(tempWorkspace: string, logger: ILogger): InputCapabilityContext { - const options = mergeConfig({workspaceDir: tempWorkspace}) - - return { - logger, - fs, - path, - glob, - userConfigOptions: options, - dependencyContext: {} - } as InputCapabilityContext -} - -describe('skill input plugin', () => { - it('reads compiled mdx from dist and non-mdx resources from src', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-test-')) - const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') - const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') - - try { - fs.mkdirSync(srcSkillDir, {recursive: true}) - fs.mkdirSync(distSkillDir, {recursive: true}) - fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src skill\n---\nSkill source', 'utf8') - fs.writeFileSync(path.join(srcSkillDir, 'guide.src.mdx'), '---\ndescription: src guide\n---\nGuide source', 'utf8') - fs.writeFileSync(path.join(srcSkillDir, 'notes.md'), 'Source notes', 'utf8') - fs.writeFileSync(path.join(srcSkillDir, 'demo.kts'), 'println("source")', 'utf8') - fs.writeFileSync(path.join(srcSkillDir, 'mcp.json'), '{"mcpServers":{"demo":{"command":"demo"}}}', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\ndescription: dist skill\n---\nexport const x = 1\n\nSkill dist', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'guide.mdx'), '---\ndescription: dist guide\n---\nGuide dist', 'utf8') - - const plugin = new SkillInputCapability() - const result = await plugin.collect(createContext(tempWorkspace, createMockLogger())) - const [skill] = result.skills ?? [] - - expect(result.skills?.length ?? 0).toBe(1) - expect(skill?.skillName).toBe('demo') - expect(skill?.content).toContain('Skill dist') - expect(skill?.content).not.toContain('Skill source') - expect(skill?.content).not.toContain('export const x = 1') - expect(skill?.yamlFrontMatter?.name).toBe('demo') - expect(skill?.yamlFrontMatter?.description).toBe('dist skill') - expect(skill?.childDocs?.map(childDoc => childDoc.relativePath)).toEqual(['guide.mdx']) - expect(skill?.childDocs?.[0]?.content).toContain('Guide dist') - expect(skill?.childDocs?.[0]?.content).not.toContain('Guide source') - expect(new Set(skill?.resources?.map(resource => resource.relativePath) ?? [])).toEqual(new Set(['demo.kts', 'notes.md'])) - expect(skill?.resources?.find(resource => resource.relativePath === 'notes.md')?.content).toBe('Source notes') - expect(skill?.resources?.find(resource => resource.relativePath === 'demo.kts')?.content).toContain('println("source")') - expect(skill?.mcpConfig?.mcpServers.demo?.command).toBe('demo') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('uses src resources even when a legacy dist copy still exists', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-resource-test-')) - const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') - const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') - - try { - fs.mkdirSync(srcSkillDir, {recursive: true}) - fs.mkdirSync(distSkillDir, {recursive: true}) - fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src skill\n---\nSkill source', 'utf8') - fs.writeFileSync(path.join(srcSkillDir, 'notes.md'), 'Source notes', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\ndescription: dist skill\n---\nSkill dist', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'notes.md'), 'Legacy dist notes', 'utf8') - - const plugin = new SkillInputCapability() - const result = await plugin.collect(createContext(tempWorkspace, createMockLogger())) - const [skill] = result.skills ?? [] - - expect(skill?.resources?.find(resource => resource.relativePath === 'notes.md')?.content).toBe('Source notes') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('fails hard when child docs are missing compiled dist pairs', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-missing-child-test-')) - const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') - const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') - - try { - fs.mkdirSync(srcSkillDir, {recursive: true}) - fs.mkdirSync(distSkillDir, {recursive: true}) - fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src skill\n---\nSkill source', 'utf8') - fs.writeFileSync(path.join(srcSkillDir, 'guide.src.mdx'), '---\ndescription: src guide\n---\nGuide source', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\ndescription: dist skill\n---\nSkill dist', 'utf8') - - const plugin = new SkillInputCapability() - await expect(plugin.collect(createContext(tempWorkspace, createMockLogger()))).rejects.toThrow('Missing compiled dist prompt') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('fails hard when the main skill exists only in src', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-main-missing-dist-test-')) - const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') - - try { - fs.mkdirSync(srcSkillDir, {recursive: true}) - fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src only skill\n---\nSkill source', 'utf8') - - const plugin = new SkillInputCapability() - await expect(plugin.collect(createContext(tempWorkspace, createMockLogger()))).rejects.toThrow('Missing compiled dist prompt') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('rejects workspace as an unsupported skill scope', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-workspace-scope-test-')) - const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') - const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') - - try { - fs.mkdirSync(srcSkillDir, {recursive: true}) - fs.mkdirSync(distSkillDir, {recursive: true}) - fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src skill\n---\nSkill source', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\ndescription: dist skill\nscope: workspace\n---\nSkill dist', 'utf8') - - const plugin = new SkillInputCapability() - await expect(plugin.collect(createContext(tempWorkspace, createMockLogger()))).rejects.toThrow('Field "scope" must be "project" or "global"') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('warns and ignores authored skill name metadata', async () => { - const warnings: string[] = [] - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-name-warning-test-')) - const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') - const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') - - try { - fs.mkdirSync(srcSkillDir, {recursive: true}) - fs.mkdirSync(distSkillDir, {recursive: true}) - fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\nname: custom-demo\ndescription: src skill\n---\nSkill source', 'utf8') - fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\nname: custom-demo\ndescription: dist skill\n---\nSkill dist', 'utf8') - - const plugin = new SkillInputCapability() - const result = await plugin.collect(createContext(tempWorkspace, createMockLogger(warnings))) - const [skill] = result.skills ?? [] - - expect(skill?.skillName).toBe('demo') - expect(skill?.yamlFrontMatter?.name).toBe('demo') - expect(skill?.yamlFrontMatter?.description).toBe('dist skill') - expect(warnings).toContain('SKILL_NAME_IGNORED') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/input-agentskills.ts b/cli/src/inputs/input-agentskills.ts deleted file mode 100644 index 129a430c..00000000 --- a/cli/src/inputs/input-agentskills.ts +++ /dev/null @@ -1,836 +0,0 @@ -import type {ParsedMarkdown} from '@truenine/md-compiler/markdown' -import type {Dirent} from 'node:fs' -import type { - ILogger, - InputCapabilityContext, - InputCollectedContext, - McpServerConfig, - SkillChildDoc, - SkillMcpConfig, - SkillPrompt, - SkillResource, - SkillResourceEncoding, - SkillYAMLFrontMatter -} from '../plugins/plugin-core' -import type {ResourceScanResult} from './input-agentskills-types' - -import {Buffer} from 'node:buffer' -import * as nodePath from 'node:path' -import {transformMdxReferencesToMd} from '@truenine/md-compiler/markdown' -import { - buildConfigDiagnostic, - buildDiagnostic, - buildFileOperationDiagnostic, - buildPathStateDiagnostic, - buildPromptCompilerDiagnostic, - diagnosticLines -} from '@/diagnostics' -import { - AbstractInputCapability, - createLocalizedPromptReader, - FilePathKind, - hasSourcePromptExtension, - PromptKind, - SourceLocaleExtensions, - validateSkillMetadata -} from '../plugins/plugin-core' -import {assertNoResidualModuleSyntax, MissingCompiledPromptError} from '../plugins/plugin-core/DistPromptGuards' -import {readPromptArtifact} from '../plugins/plugin-core/PromptArtifactCache' -import { - formatPromptCompilerDiagnostic, - resolveSourcePathForDistFile -} from '../plugins/plugin-core/PromptCompilerDiagnostics' - -export * from './input-agentskills-types' // Re-export from types file - -interface WritableSkillMetadata { - name?: string - description?: string - displayName?: string - keywords?: string[] - author?: string - version?: string - allowTools?: string[] - [key: string]: unknown -} - -const EXPORT_DEFAULT_REGEX = /export\s+default\s*\{([\s\S]*?)\}/u -const DESCRIPTION_REGEX = /description\s*:\s*['"`]([^'"`]+)['"`]/u -const NAME_REGEX = /name\s*:\s*['"`]([^'"`]+)['"`]/u -const DISPLAY_NAME_REGEX = /displayName\s*:\s*['"`]([^'"`]+)['"`]/u -const KEYWORDS_REGEX = /keywords\s*:\s*\[([^\]]+)\]/u -const AUTHOR_REGEX = /author\s*:\s*['"`]([^'"`]+)['"`]/u -const VERSION_REGEX = /version\s*:\s*['"`]([^'"`]+)['"`]/u - -function extractSkillMetadataFromExport(content: string): WritableSkillMetadata { - const metadata: WritableSkillMetadata = {} - - const exportMatch = EXPORT_DEFAULT_REGEX.exec(content) - if (exportMatch?.[1] == null) return metadata - - const objectContent = exportMatch[1] - - const descriptionMatch = DESCRIPTION_REGEX.exec(objectContent) - if (descriptionMatch?.[1] != null) metadata.description = descriptionMatch[1] - - const nameMatch = NAME_REGEX.exec(objectContent) - if (nameMatch?.[1] != null) metadata.name = nameMatch[1] - - const displayNameMatch = DISPLAY_NAME_REGEX.exec(objectContent) - if (displayNameMatch?.[1] != null) metadata.displayName = displayNameMatch[1] - - const keywordsMatch = KEYWORDS_REGEX.exec(objectContent) - if (keywordsMatch?.[1] != null) { - metadata.keywords = keywordsMatch[1] - .split(',') - .map(k => k.trim().replaceAll(/['"]/gu, '')) - .filter(k => k.length > 0) - } - - const authorMatch = AUTHOR_REGEX.exec(objectContent) - if (authorMatch?.[1] != null) metadata.author = authorMatch[1] - - const versionMatch = VERSION_REGEX.exec(objectContent) - if (versionMatch?.[1] != null) metadata.version = versionMatch[1] - - return metadata -} - -function mergeDefinedSkillMetadata( - ...sources: (Record | undefined)[] -): WritableSkillMetadata { - const merged: WritableSkillMetadata = {} - - for (const source of sources) { - if (source == null) continue - - for (const [key, value] of Object.entries(source)) { - if (value !== void 0) (merged as Record)[key] = value - } - } - - return merged -} - -function warnIgnoredSkillName(options: { - readonly logger: ILogger - readonly warnedDerivedNames?: Set - readonly sourcePath: string - readonly authoredName: string - readonly skillName: string -}): void { - const {logger, warnedDerivedNames, sourcePath, authoredName, skillName} = options - if (warnedDerivedNames?.has(sourcePath) === true) return - - warnedDerivedNames?.add(sourcePath) - logger.warn(buildConfigDiagnostic({ - code: 'SKILL_NAME_IGNORED', - title: 'Skill authored name is ignored', - reason: diagnosticLines( - `tnmsc ignores the authored skill name "${authoredName}" in favor of the directory-derived name "${skillName}".` - ), - configPath: sourcePath, - exactFix: diagnosticLines( - 'Remove the `name` field from the skill front matter or exported metadata.', - 'Rename the skill directory if you need a different skill name.' - ), - details: { - authoredName, - derivedName: skillName - } - })) -} - -const MIME_TYPES: Record = { // MIME types for resources - '.ts': 'text/typescript', - '.tsx': 'text/typescript', - '.js': 'text/javascript', - '.jsx': 'text/javascript', - '.json': 'application/json', - '.py': 'text/x-python', - '.java': 'text/x-java', - '.kt': 'text/x-kotlin', - '.go': 'text/x-go', - '.rs': 'text/x-rust', - '.c': 'text/x-c', - '.cpp': 'text/x-c++', - '.cs': 'text/x-csharp', - '.rb': 'text/x-ruby', - '.php': 'text/x-php', - '.swift': 'text/x-swift', - '.scala': 'text/x-scala', - '.sql': 'application/sql', - '.xml': 'application/xml', - '.yaml': 'text/yaml', - '.yml': 'text/yaml', - '.toml': 'text/toml', - '.csv': 'text/csv', - '.graphql': 'application/graphql', - '.txt': 'text/plain', - '.pdf': 'application/pdf', - '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', - '.xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', - '.html': 'text/html', - '.css': 'text/css', - '.svg': 'image/svg+xml', - '.png': 'image/png', - '.jpg': 'image/jpeg', - '.jpeg': 'image/jpeg', - '.gif': 'image/gif', - '.webp': 'image/webp', - '.ico': 'image/x-icon', - '.bmp': 'image/bmp' -} - -const SKILL_RESOURCE_BINARY_EXTENSIONS = new Set([ // Binary extensions - '.png', - '.jpg', - '.jpeg', - '.gif', - '.webp', - '.ico', - '.bmp', - '.tiff', - '.svg', - '.exe', - '.dll', - '.so', - '.dylib', - '.bin', - '.wasm', - '.class', - '.jar', - '.war', - '.pyd', - '.pyc', - '.pyo', - '.zip', - '.tar', - '.gz', - '.bz2', - '.7z', - '.rar', - '.ttf', - '.otf', - '.woff', - '.woff2', - '.eot', - '.db', - '.sqlite', - '.sqlite3', - '.pdf', - '.docx', - '.doc', - '.xlsx', - '.xls', - '.pptx', - '.ppt', - '.odt', - '.ods', - '.odp' -]) - -function isBinaryResourceExtension(ext: string): boolean { - return SKILL_RESOURCE_BINARY_EXTENSIONS.has(ext.toLowerCase()) -} - -function getMimeType(ext: string): string | undefined { - return MIME_TYPES[ext.toLowerCase()] -} - -function pathJoin(...segments: string[]): string { - const joined = nodePath.join(...segments) - return joined.replaceAll('\\', '/') -} - -interface ResourceProcessorContext { - readonly fs: typeof import('node:fs') - readonly logger: ILogger - readonly skillDir: string - readonly scanMode: 'distChildDocs' | 'srcResources' - readonly sourceSkillDir?: string - readonly globalScope?: InputCapabilityContext['globalScope'] -} - -class ResourceProcessor { - private readonly ctx: ResourceProcessorContext - - constructor(ctx: ResourceProcessorContext) { - this.ctx = ctx - } - - async processDirectory(entry: Dirent, currentRelativePath: string, filePath: string): Promise { - const relativePath = currentRelativePath - ? `${currentRelativePath}/${entry.name}` - : entry.name - return this.scanSkillDirectoryAsync(filePath, relativePath) - } - - async processFile(entry: Dirent, currentRelativePath: string, filePath: string): Promise { - const relativePath = currentRelativePath - ? `${currentRelativePath}/${entry.name}` - : entry.name - - if (this.ctx.scanMode === 'distChildDocs') { - if (currentRelativePath === '' && entry.name === 'skill.mdx') return {childDocs: [], resources: []} - if (hasSourcePromptExtension(entry.name) || !entry.name.endsWith('.mdx')) return {childDocs: [], resources: []} - - const childDoc = await this.processChildDoc(relativePath, filePath) - return {childDocs: childDoc ? [childDoc] : [], resources: []} - } - - if (currentRelativePath === '' && entry.name === 'mcp.json') return {childDocs: [], resources: []} - if (hasSourcePromptExtension(entry.name) || entry.name.endsWith('.mdx')) return {childDocs: [], resources: []} - - const resource = this.processResourceFile(entry.name, relativePath, filePath) - return {childDocs: [], resources: resource ? [resource] : []} - } - - private async processChildDoc(relativePath: string, filePath: string): Promise { - try { - const artifact = await readPromptArtifact(filePath, { - mode: 'dist', - globalScope: this.ctx.globalScope - }) - const compiledContent = transformMdxReferencesToMd(artifact.content) - assertNoResidualModuleSyntax(compiledContent, filePath) - - return { - type: PromptKind.SkillChildDoc, - content: compiledContent, - length: compiledContent.length, - filePathKind: FilePathKind.Relative, - markdownAst: artifact.parsed.markdownAst, - markdownContents: artifact.parsed.markdownContents, - ...artifact.parsed.rawFrontMatter != null && {rawFrontMatter: artifact.parsed.rawFrontMatter}, - relativePath, - dir: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: this.ctx.skillDir, - getDirectoryName: () => nodePath.dirname(relativePath), - getAbsolutePath: () => filePath - } - } - } - catch (error) { - this.ctx.logger.error(buildPromptCompilerDiagnostic({ - code: 'SKILL_CHILD_DOC_COMPILE_FAILED', - title: 'Failed to compile skill child doc', - diagnosticText: formatPromptCompilerDiagnostic(error, { - operation: 'Failed to compile skill child doc.', - promptKind: 'skill-child-doc', - logicalName: `${nodePath.basename(this.ctx.skillDir)}/${relativePath.replace(/\.mdx$/u, '')}`, - distPath: filePath, - srcPath: resolveSourcePathForDistFile(nodePath, filePath, { - distRootDir: this.ctx.skillDir, - srcRootDir: this.ctx.sourceSkillDir - }) - }), - details: { - skillDir: this.ctx.skillDir, - relativePath, - filePath - } - })) - throw error - } - } - - private processResourceFile(fileName: string, relativePath: string, filePath: string): SkillResource | null { - const ext = nodePath.extname(fileName) - - try { - const {content, encoding, length} = this.readFileContent(filePath, ext) - const mimeType = getMimeType(ext) - - const resource: SkillResource = { - type: PromptKind.SkillResource, - extension: ext, - fileName, - relativePath, - sourcePath: filePath, - content, - encoding, - length, - ...mimeType != null && {mimeType} - } - - return resource - } - catch (e) { - this.ctx.logger.warn(buildFileOperationDiagnostic({ - code: 'SKILL_RESOURCE_READ_FAILED', - title: 'Failed to read skill resource file', - operation: 'read', - targetKind: 'skill resource file', - path: filePath, - error: e, - details: { - relativePath, - fileName, - skillDir: this.ctx.skillDir - } - })) - return null - } - } - - private readFileContent(filePath: string, ext: string): {content: string, encoding: SkillResourceEncoding, length: number} { - if (isBinaryResourceExtension(ext)) { - const buffer = this.ctx.fs.readFileSync(filePath) - return { - content: buffer.toString('base64'), - encoding: 'base64', - length: buffer.length - } - } - - const content = this.ctx.fs.readFileSync(filePath, 'utf8') - return { - content, - encoding: 'text', - length: Buffer.from(content, 'utf8').length - } - } - - async scanSkillDirectoryAsync(currentDir: string, currentRelativePath: string = ''): Promise { - const childDocs: SkillChildDoc[] = [] - const resources: SkillResource[] = [] - - let entries: Dirent[] - try { - entries = this.ctx.fs.readdirSync(currentDir, {withFileTypes: true}) - } - catch (e) { - this.ctx.logger.warn(buildFileOperationDiagnostic({ - code: 'SKILL_DIRECTORY_SCAN_FAILED', - title: 'Failed to scan skill directory', - operation: 'scan', - targetKind: 'skill directory', - path: currentDir, - error: e, - details: { - skillDir: this.ctx.skillDir, - scanMode: this.ctx.scanMode - } - })) - return {childDocs, resources} - } - - for (const entry of entries) { - const filePath = pathJoin(currentDir, entry.name) - - if (entry.isDirectory()) { - const subResult = await this.processDirectory(entry, currentRelativePath, filePath) - childDocs.push(...subResult.childDocs) - resources.push(...subResult.resources) - continue - } - - if (!entry.isFile()) continue - - const fileResult = await this.processFile(entry, currentRelativePath, filePath) - childDocs.push(...fileResult.childDocs) - resources.push(...fileResult.resources) - } - - return {childDocs, resources} - } -} - -function collectExpectedCompiledChildDocPaths( - skillDir: string, - fs: typeof import('node:fs'), - logger: ILogger, - currentRelativePath: string = '' -): string[] { - const expectedPaths: string[] = [] - const currentDir = currentRelativePath === '' - ? skillDir - : pathJoin(skillDir, currentRelativePath) - - let entries: Dirent[] - try { - entries = fs.readdirSync(currentDir, {withFileTypes: true}) - } - catch (error) { - logger.warn(buildFileOperationDiagnostic({ - code: 'SKILL_SOURCE_CHILD_SCAN_FAILED', - title: 'Failed to scan skill source child docs', - operation: 'scan', - targetKind: 'skill source child doc directory', - path: currentDir, - error - })) - return expectedPaths - } - - for (const entry of entries) { - const entryRelativePath = currentRelativePath - ? `${currentRelativePath}/${entry.name}` - : entry.name - - if (entry.isDirectory()) { - expectedPaths.push(...collectExpectedCompiledChildDocPaths(skillDir, fs, logger, entryRelativePath)) - continue - } - - if (!entry.isFile() || !hasSourcePromptExtension(entry.name)) continue - if (currentRelativePath === '' && entry.name === 'skill.src.mdx') continue - - expectedPaths.push(entryRelativePath.replace(/\.src\.mdx$/u, '.mdx')) - } - - return expectedPaths -} - -function assertCompiledChildDocsExist( - skillName: string, - skillSrcDir: string, - skillDistDir: string, - fs: typeof import('node:fs'), - logger: ILogger -): void { - if (!fs.existsSync(skillSrcDir)) return - - for (const relativePath of collectExpectedCompiledChildDocPaths(skillSrcDir, fs, logger)) { - const distPath = nodePath.join(skillDistDir, relativePath) - if (fs.existsSync(distPath)) continue - - throw new MissingCompiledPromptError({ - kind: 'skill child doc', - name: `${skillName}/${relativePath}`, - sourcePath: nodePath.join(skillSrcDir, relativePath.replace(/\.mdx$/u, '.src.mdx')), - expectedDistPath: distPath - }) - } -} - -function readMcpConfig( - skillDir: string, - fs: typeof import('node:fs'), - logger: ILogger -): SkillMcpConfig | undefined { - const mcpJsonPath = nodePath.join(skillDir, 'mcp.json') - - if (!fs.existsSync(mcpJsonPath)) return void 0 - - if (!fs.statSync(mcpJsonPath).isFile()) { - logger.warn(buildPathStateDiagnostic({ - code: 'SKILL_MCP_CONFIG_NOT_FILE', - title: 'Skill MCP config path is not a file', - path: mcpJsonPath, - expectedKind: 'mcp.json file', - actualState: 'path exists but is not a regular file', - details: { - skillDir - } - })) - return void 0 - } - - try { - const rawContent = fs.readFileSync(mcpJsonPath, 'utf8') - const parsed = JSON.parse(rawContent) as {mcpServers?: Record} - - if (parsed.mcpServers == null || typeof parsed.mcpServers !== 'object') { - logger.warn(buildConfigDiagnostic({ - code: 'SKILL_MCP_CONFIG_INVALID', - title: 'Skill MCP config is missing mcpServers', - reason: diagnosticLines( - `The skill MCP config at "${mcpJsonPath}" does not contain a top-level mcpServers object.` - ), - configPath: mcpJsonPath, - exactFix: diagnosticLines( - 'Add a top-level `mcpServers` object to mcp.json before retrying tnmsc.' - ), - details: { - skillDir - } - })) - return void 0 - } - - return { - type: PromptKind.SkillMcpConfig, - mcpServers: parsed.mcpServers, - rawContent - } - } - catch (e) { - logger.warn(buildConfigDiagnostic({ - code: 'SKILL_MCP_CONFIG_PARSE_FAILED', - title: 'Failed to parse skill MCP config', - reason: diagnosticLines( - `tnmsc could not parse the MCP config file at "${mcpJsonPath}".`, - `Underlying error: ${e instanceof Error ? e.message : String(e)}` - ), - configPath: mcpJsonPath, - exactFix: diagnosticLines('Fix the JSON syntax in mcp.json and rerun tnmsc.'), - details: { - skillDir, - errorMessage: e instanceof Error ? e.message : String(e) - } - })) - return void 0 - } -} - -async function createSkillPrompt( - content: string, - _locale: 'zh' | 'en', - name: string, - skillDir: string, - skillAbsoluteDir: string, - sourceSkillAbsoluteDir: string, - ctx: InputCapabilityContext, - mcpConfig?: SkillMcpConfig, - childDocs: SkillPrompt['childDocs'] = [], - resources: SkillPrompt['resources'] = [], - seriName?: string | string[] | null, - compiledMetadata?: Record, - warnedDerivedNames?: Set -): Promise { - const {logger, fs} = ctx - - const distFilePath = nodePath.join(skillAbsoluteDir, 'skill.mdx') - const sourceFilePath = fs.existsSync(nodePath.join(sourceSkillAbsoluteDir, 'skill.src.mdx')) - ? nodePath.join(sourceSkillAbsoluteDir, 'skill.src.mdx') - : distFilePath - let rawContent = content - let parsed: ParsedMarkdown | undefined, - distMetadata: Record | undefined - - if (fs.existsSync(distFilePath)) { - const artifact = await readPromptArtifact(distFilePath, { - mode: 'dist', - globalScope: ctx.globalScope - }) - rawContent = artifact.rawMdx - parsed = artifact.parsed as ParsedMarkdown - content = transformMdxReferencesToMd(artifact.content) - assertNoResidualModuleSyntax(content, distFilePath) - distMetadata = artifact.metadata - } - - const exportMetadata = mergeDefinedSkillMetadata( - extractSkillMetadataFromExport(rawContent), - compiledMetadata, - distMetadata - ) // Merge fallback export parsing with compiled metadata so empty metadata objects do not mask valid fields - - const authoredNames = new Set() - const yamlName = parsed?.yamlFrontMatter?.name - if (typeof yamlName === 'string' && yamlName.trim().length > 0) authoredNames.add(yamlName) - const exportedName = exportMetadata.name - if (typeof exportedName === 'string' && exportedName.trim().length > 0) authoredNames.add(exportedName) - - for (const authoredName of authoredNames) { - warnIgnoredSkillName({ - logger, - sourcePath: sourceFilePath, - authoredName, - skillName: name, - ...warnedDerivedNames != null && {warnedDerivedNames} - }) - } - - const finalDescription = parsed?.yamlFrontMatter?.description ?? exportMetadata?.description - - if (finalDescription == null || finalDescription.trim().length === 0) { // Strict validation: description must exist and not be empty - logger.error(buildDiagnostic({ - code: 'SKILL_VALIDATION_FAILED', - title: 'Skill description is required', - rootCause: diagnosticLines( - `The skill "${name}" does not provide a non-empty description in its compiled metadata or front matter.` - ), - exactFix: diagnosticLines( - 'Add a non-empty description field to the skill front matter or exported metadata and rebuild the skill.' - ), - possibleFixes: [ - diagnosticLines('Set `description` in `SKILL.md` front matter.'), - diagnosticLines('If you export metadata from code, ensure the exported description is non-empty.') - ], - details: { - skill: name, - skillDir, - yamlDescription: parsed?.yamlFrontMatter?.description, - exportDescription: exportMetadata?.description - } - })) - throw new Error(`Skill "${name}" validation failed: description is required and cannot be empty`) - } - - const mergedFrontMatter: SkillYAMLFrontMatter = { - ...exportMetadata, - ...parsed?.yamlFrontMatter ?? {}, - name, - description: finalDescription - } as SkillYAMLFrontMatter - - const validation = validateSkillMetadata(mergedFrontMatter as Record, distFilePath) - if (!validation.valid) throw new Error(validation.errors.join('\n')) - - return { - type: PromptKind.Skill, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - skillName: name, - yamlFrontMatter: mergedFrontMatter, - markdownAst: parsed?.markdownAst, - markdownContents: parsed?.markdownContents ?? [], - dir: { - pathKind: FilePathKind.Relative, - path: name, - basePath: skillDir, - getDirectoryName: () => name, - getAbsolutePath: () => nodePath.join(skillDir, name) - }, - ...parsed?.rawFrontMatter != null && {rawFrontMatter: parsed.rawFrontMatter}, - ...mcpConfig != null && {mcpConfig}, - ...childDocs != null && childDocs.length > 0 && {childDocs}, - ...resources != null && resources.length > 0 && {resources}, - ...seriName != null && {seriName} - } as SkillPrompt -} - -export class SkillInputCapability extends AbstractInputCapability { - constructor() { - super('SkillInputCapability') - } - - readMcpConfig( - skillDir: string, - fs: typeof import('node:fs'), - logger: ILogger - ): SkillMcpConfig | undefined { - return readMcpConfig(skillDir, fs, logger) - } - - async scanSkillDirectory( - skillDir: string, - fs: typeof import('node:fs'), - logger: ILogger, - currentRelativePath: string = '', - scanMode: 'distChildDocs' | 'srcResources' = 'srcResources', - globalScope?: InputCapabilityContext['globalScope'], - sourceSkillDir?: string - ): Promise { - const processor = new ResourceProcessor({ - fs, - logger, - skillDir, - scanMode, - ...globalScope != null && {globalScope}, - ...sourceSkillDir != null && {sourceSkillDir} - }) - return processor.scanSkillDirectoryAsync(skillDir, currentRelativePath) - } - - async collect(ctx: InputCapabilityContext): Promise> { - const {userConfigOptions: options, logger, fs, path: pathModule, globalScope} = ctx - const {aindexDir} = this.resolveBasePaths(options) - - const srcSkillDir = this.resolveAindexPath(options.aindex.skills.src, aindexDir) - const distSkillDir = this.resolveAindexPath(options.aindex.skills.dist, aindexDir) - - const flatSkills: SkillPrompt[] = [] - const reader = createLocalizedPromptReader(fs, pathModule, logger, globalScope) - const warnedDerivedNames = new Set() - const skillArtifactCache = new Map() - - const getSkillArtifacts = async (name: string): Promise<{ - readonly childDocs: SkillChildDoc[] - readonly resources: SkillResource[] - readonly mcpConfig?: SkillMcpConfig - }> => { - const cached = skillArtifactCache.get(name) - if (cached != null) return cached - - const skillSrcDir = pathModule.join(srcSkillDir, name) - const skillDistDir = pathModule.join(distSkillDir, name) - - const childDocs = fs.existsSync(skillDistDir) - ? (await this.scanSkillDirectory(skillDistDir, fs, logger, '', 'distChildDocs', globalScope, skillSrcDir)).childDocs - : [] - const resources = fs.existsSync(skillSrcDir) - ? (await this.scanSkillDirectory(skillSrcDir, fs, logger, '', 'srcResources', globalScope)).resources - : [] - const mcpConfig = readMcpConfig(skillSrcDir, fs, logger) - - assertCompiledChildDocsExist(name, skillSrcDir, skillDistDir, fs, logger) - - const artifacts = { - childDocs, - resources, - ...mcpConfig != null && {mcpConfig} - } - - skillArtifactCache.set(name, artifacts) - return artifacts - } - - const {prompts: localizedSkills, errors} = await reader.readDirectoryStructure( - srcSkillDir, - distSkillDir, - { - kind: PromptKind.Skill, - entryFileName: 'skill', - localeExtensions: SourceLocaleExtensions, - hydrateSourceContents: false, - isDirectoryStructure: true, - createPrompt: async (content, locale, name, metadata) => { - const skillDistDir = pathModule.join(distSkillDir, name) - const {childDocs, resources, mcpConfig} = await getSkillArtifacts(name) - - return createSkillPrompt( - content, - locale, - name, - distSkillDir, - skillDistDir, - pathModule.join(srcSkillDir, name), - ctx, - mcpConfig, - childDocs, - resources, - void 0, - metadata, - warnedDerivedNames - ) - } - } - ) - - for (const error of errors) { - logger.warn(buildFileOperationDiagnostic({ - code: 'SKILL_PROMPT_READ_FAILED', - title: 'Failed to read skill prompt', - operation: error.phase === 'scan' ? 'scan' : 'read', - targetKind: 'skill prompt', - path: error.path, - error: error.error, - details: { - phase: error.phase - } - })) - } - - if (errors.length > 0) throw new Error(errors.map(error => error.error instanceof Error ? error.error.message : String(error.error)).join('\n')) - - for (const localized of localizedSkills) { - const prompt = localized.dist?.prompt - if (prompt != null) flatSkills.push(prompt) - } - - return { - skills: flatSkills - } - } -} diff --git a/cli/src/inputs/input-aindex.test.ts b/cli/src/inputs/input-aindex.test.ts deleted file mode 100644 index 38f41bf9..00000000 --- a/cli/src/inputs/input-aindex.test.ts +++ /dev/null @@ -1,187 +0,0 @@ -import type {InputCapabilityContext} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it, vi} from 'vitest' -import {mergeConfig} from '../config' -import {AindexInputCapability} from './input-aindex' - -function createLoggerMock(): { - readonly logger: InputCapabilityContext['logger'] - readonly error: ReturnType - readonly warn: ReturnType -} { - const error = vi.fn() - const warn = vi.fn() - - return { - logger: { - error, - warn, - info: vi.fn(), - debug: vi.fn(), - trace: vi.fn(), - fatal: vi.fn() - }, - error, - warn - } -} - -function createContext( - tempWorkspace: string, - logger: InputCapabilityContext['logger'] -): InputCapabilityContext { - return { - logger, - fs, - path, - glob, - userConfigOptions: mergeConfig({workspaceDir: tempWorkspace}), - dependencyContext: {} - } as InputCapabilityContext -} - -function createAindexProject( - tempWorkspace: string, - projectName: string, - series: 'app' | 'ext' | 'arch' | 'softwares' = 'app' -): { - readonly configDir: string -} { - const distProjectDir = path.join(tempWorkspace, 'aindex', 'dist', series, projectName) - const configDir = path.join(tempWorkspace, 'aindex', series, projectName) - - fs.mkdirSync(distProjectDir, {recursive: true}) - fs.mkdirSync(configDir, {recursive: true}) - - return {configDir} -} - -describe('aindex input capability project config loading', () => { - it('loads project.json5 using JSON5 features without any jsonc fallback', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-aindex-project-json5-')) - const {logger, warn} = createLoggerMock() - - try { - const {configDir} = createAindexProject(tempWorkspace, 'project-a') - fs.writeFileSync(path.join(configDir, 'project.json5'), [ - '{', - ' // JSON5 comment support', - ' includeSeries: [\'alpha\'],', - ' subSeries: {', - ' skills: [\'ship-*\'],', - ' },', - '}', - '' - ].join('\n'), 'utf8') - - const result = await new AindexInputCapability().collect(createContext(tempWorkspace, logger)) - const project = result.workspace?.projects[0] - - expect(project?.name).toBe('project-a') - expect(project?.projectConfig).toEqual({ - includeSeries: ['alpha'], - subSeries: { - skills: ['ship-*'] - } - }) - expect(warn).not.toHaveBeenCalled() - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('ignores legacy project.jsonc after the hard cut', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-aindex-project-jsonc-legacy-')) - const {logger, warn} = createLoggerMock() - - try { - const {configDir} = createAindexProject(tempWorkspace, 'project-b') - fs.writeFileSync(path.join(configDir, 'project.jsonc'), '{"includeSeries":["legacy"]}\n', 'utf8') - - const result = await new AindexInputCapability().collect(createContext(tempWorkspace, logger)) - const project = result.workspace?.projects[0] - - expect(project?.name).toBe('project-b') - expect(project?.projectConfig).toBeUndefined() - expect(warn).not.toHaveBeenCalled() - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('emits JSON5 diagnostics for invalid project.json5 syntax', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-aindex-project-json5-invalid-')) - const {logger, warn} = createLoggerMock() - - try { - const {configDir} = createAindexProject(tempWorkspace, 'project-c') - fs.writeFileSync(path.join(configDir, 'project.json5'), '{includeSeries: [\'broken\',]} trailing', 'utf8') - - const result = await new AindexInputCapability().collect(createContext(tempWorkspace, logger)) - const project = result.workspace?.projects[0] - const diagnostic = warn.mock.calls[0]?.[0] - - expect(project?.name).toBe('project-c') - expect(project?.projectConfig).toBeUndefined() - expect(warn).toHaveBeenCalledTimes(1) - expect(diagnostic).toEqual(expect.objectContaining({ - code: 'AINDEX_PROJECT_JSON5_INVALID', - title: 'Failed to parse project.json5 for project-c', - exactFix: ['Fix the JSON5 syntax in project.json5 and rerun tnmsc.'] - })) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('collects app, ext, arch, and softwares projects with series-aware metadata', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-aindex-project-series-')) - const {logger} = createLoggerMock() - - try { - createAindexProject(tempWorkspace, 'project-a', 'app') - createAindexProject(tempWorkspace, 'plugin-a', 'ext') - createAindexProject(tempWorkspace, 'system-a', 'arch') - createAindexProject(tempWorkspace, 'tool-a', 'softwares') - - const result = await new AindexInputCapability().collect(createContext(tempWorkspace, logger)) - const projects = result.workspace?.projects ?? [] - - expect(projects.map(project => `${project.promptSeries}:${project.name}`)).toEqual([ - 'app:project-a', - 'ext:plugin-a', - 'arch:system-a', - 'softwares:tool-a' - ]) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('fails fast when app, ext, arch, and softwares reuse the same project name', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-aindex-project-conflict-')) - const {logger, error} = createLoggerMock() - - try { - createAindexProject(tempWorkspace, 'project-a', 'app') - createAindexProject(tempWorkspace, 'project-a', 'softwares') - - await expect(new AindexInputCapability().collect(createContext(tempWorkspace, logger))) - .rejects - .toThrow('Aindex project series name conflict') - expect(error).toHaveBeenCalledWith(expect.objectContaining({ - code: 'AINDEX_PROJECT_SERIES_NAME_CONFLICT' - })) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/input-aindex.ts b/cli/src/inputs/input-aindex.ts deleted file mode 100644 index 048d7216..00000000 --- a/cli/src/inputs/input-aindex.ts +++ /dev/null @@ -1,270 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext, Project, ProjectConfig, Workspace} from '../plugins/plugin-core' -import type {AindexProjectSeriesConfig} from '@/aindex-project-series' - -import JSON5 from 'json5' -import { - collectAindexProjectSeriesProjectNameConflicts, - resolveAindexProjectSeriesConfigs -} from '@/aindex-project-series' -import { - buildConfigDiagnostic, - buildFileOperationDiagnostic, - diagnosticLines -} from '@/diagnostics' -import {AbstractInputCapability, FilePathKind} from '../plugins/plugin-core' - -export class AindexInputCapability extends AbstractInputCapability { - private static readonly projectConfigFileName = 'project.json5' - private static readonly conflictingProjectSeriesCode = 'AINDEX_PROJECT_SERIES_NAME_CONFLICT' - - constructor() { - super('AindexInputCapability') - } - - private loadProjectConfig( - projectName: string, - aindexDir: string, - srcPath: string, - fs: InputCapabilityContext['fs'], - path: InputCapabilityContext['path'], - logger: InputCapabilityContext['logger'] - ): ProjectConfig | undefined { - const configPath = path.join( - aindexDir, - srcPath, - projectName, - AindexInputCapability.projectConfigFileName - ) - if (!fs.existsSync(configPath)) return void 0 - - try { - const raw = fs.readFileSync(configPath, 'utf8') - - try { - return JSON5.parse(raw) - } - catch (e) { - logger.warn(buildConfigDiagnostic({ - code: 'AINDEX_PROJECT_JSON5_INVALID', - title: `Failed to parse ${AindexInputCapability.projectConfigFileName} for ${projectName}`, - reason: diagnosticLines( - `tnmsc could not parse the ${AindexInputCapability.projectConfigFileName} file for "${projectName}".`, - `Underlying error: ${e instanceof Error ? e.message : String(e)}` - ), - configPath, - exactFix: diagnosticLines( - `Fix the JSON5 syntax in ${AindexInputCapability.projectConfigFileName} and rerun tnmsc.` - ), - details: { - projectName, - errorMessage: e instanceof Error ? e.message : String(e) - } - })) - return void 0 - } - } - catch (e) { - logger.warn(buildConfigDiagnostic({ - code: 'AINDEX_PROJECT_JSON5_READ_FAILED', - title: `Failed to load ${AindexInputCapability.projectConfigFileName} for ${projectName}`, - reason: diagnosticLines( - `tnmsc could not read the ${AindexInputCapability.projectConfigFileName} file for "${projectName}".`, - `Underlying error: ${e instanceof Error ? e.message : String(e)}` - ), - configPath, - exactFix: diagnosticLines( - `Ensure ${AindexInputCapability.projectConfigFileName} exists, is readable, and contains valid JSON5.` - ), - details: { - projectName, - errorMessage: e instanceof Error ? e.message : String(e) - } - })) - return void 0 - } - } - - private async scanSeriesProjects( - ctx: InputCapabilityContext, - workspaceDir: string, - aindexDir: string, - aindexName: string, - projectNameSource: readonly AindexProjectSeriesConfig[] - ): Promise { - const {logger, fs, path} = ctx - const projectGroups = await Promise.all(projectNameSource.map(async series => { - const aindexProjectsDir = this.resolveAindexPath(series.dist, aindexDir) - const distDirStat = await fs.promises.stat(aindexProjectsDir).catch(() => void 0) - if (!(distDirStat?.isDirectory() === true)) return [] - - try { - const entries = (await fs.promises.readdir(aindexProjectsDir, {withFileTypes: true})) - .filter(entry => entry.isDirectory()) - .sort((a, b) => a.name.localeCompare(b.name)) - const projects: Project[] = [] - - for (const entry of entries) { - const isTheAindex = entry.name === aindexName - const projectConfig = this.loadProjectConfig(entry.name, aindexDir, series.src, fs, path, logger) - - projects.push({ - name: entry.name, - promptSeries: series.name, - ...isTheAindex && {isPromptSourceProject: true}, - ...projectConfig != null && {projectConfig}, - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: entry.name, - basePath: workspaceDir, - getDirectoryName: () => entry.name, - getAbsolutePath: () => path.resolve(workspaceDir, entry.name) - } - }) - } - - return projects - } - catch (e) { - logger.error(buildFileOperationDiagnostic({ - code: 'AINDEX_PROJECT_DIRECTORY_SCAN_FAILED', - title: `Failed to scan aindex ${series.name} projects directory`, - operation: 'scan', - targetKind: `aindex ${series.name} projects directory`, - path: aindexProjectsDir, - error: e - })) - - return [] - } - })) - - return projectGroups.flat() - } - - private loadFallbackProjectConfig( - projectName: string, - aindexDir: string, - ctx: Pick - ): ProjectConfig | undefined { - for (const series of resolveAindexProjectSeriesConfigs(ctx.userConfigOptions)) { - const config = this.loadProjectConfig(projectName, aindexDir, series.src, ctx.fs, ctx.path, ctx.logger) - if (config != null) return config - } - - return void 0 - } - - private assertNoCrossSeriesProjectNameConflicts( - ctx: Pick, - aindexDir: string, - projectSeries: readonly AindexProjectSeriesConfig[] - ): void { - const {logger, fs, path} = ctx - const projectRefs = projectSeries.flatMap(series => { - const seriesSourceDir = path.join(aindexDir, series.src) - if (!(fs.existsSync(seriesSourceDir) && fs.statSync(seriesSourceDir).isDirectory())) return [] - - return fs - .readdirSync(seriesSourceDir, {withFileTypes: true}) - .filter(entry => entry.isDirectory()) - .map(entry => ({ - projectName: entry.name, - seriesName: series.name, - seriesDir: path.join(seriesSourceDir, entry.name) - })) - }) - const conflicts = collectAindexProjectSeriesProjectNameConflicts(projectRefs) - if (conflicts.length === 0) return - - logger.error(buildConfigDiagnostic({ - code: AindexInputCapability.conflictingProjectSeriesCode, - title: 'Project names must be unique across app, ext, arch, and softwares', - reason: diagnosticLines( - 'tnmsc maps project-scoped outputs back to workspace project names, so app/ext/arch/softwares cannot reuse the same directory name.', - `Conflicting project names: ${conflicts.map(conflict => conflict.projectName).join(', ')}` - ), - exactFix: diagnosticLines( - 'Rename the conflicting project directory in one of the app/ext/arch/softwares source trees and rerun tnmsc.' - ), - possibleFixes: conflicts.map(conflict => diagnosticLines( - `"${conflict.projectName}" is currently declared in: ${conflict.refs.map(ref => `${ref.seriesName} (${ref.seriesDir})`).join(', ')}` - )), - details: { - aindexDir, - conflicts: conflicts.map(conflict => ({ - projectName: conflict.projectName, - refs: conflict.refs.map(ref => ({ - seriesName: ref.seriesName, - seriesDir: ref.seriesDir - })) - })) - } - })) - - throw new Error('Aindex project series name conflict') - } - - async collect(ctx: InputCapabilityContext): Promise> { - const {userConfigOptions: options, logger, fs, path} = ctx - const {workspaceDir, aindexDir} = this.resolveBasePaths(options) - const aindexName = path.basename(aindexDir) - const projectSeries = resolveAindexProjectSeriesConfigs(options) - - // Project outputs intentionally collapse to /, so - // app/ext/arch/softwares must never reuse the same project directory name. - this.assertNoCrossSeriesProjectNameConflicts(ctx, aindexDir, projectSeries) - - const aindexProjects = await this.scanSeriesProjects(ctx, workspaceDir, aindexDir, aindexName, projectSeries) - - if (aindexProjects.length === 0 && fs.existsSync(workspaceDir) && fs.statSync(workspaceDir).isDirectory()) { - logger.debug('no projects in dist/app, dist/ext, or dist/arch; falling back to workspace scan', {workspaceDir}) - try { - const entries = fs - .readdirSync(workspaceDir, {withFileTypes: true}) - .filter(entry => entry.isDirectory()) - .sort((a, b) => a.name.localeCompare(b.name)) - - for (const entry of entries) { - if (entry.name.startsWith('.')) continue - - const isTheAindex = entry.name === aindexName - const projectConfig = this.loadFallbackProjectConfig(entry.name, aindexDir, ctx) - - aindexProjects.push({ - name: entry.name, - ...isTheAindex && {isPromptSourceProject: true}, - ...projectConfig != null && {projectConfig}, - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: entry.name, - basePath: workspaceDir, - getDirectoryName: () => entry.name, - getAbsolutePath: () => path.resolve(workspaceDir, entry.name) - } - }) - } - } - catch (e) { - logger.error(buildFileOperationDiagnostic({ - code: 'WORKSPACE_DIRECTORY_SCAN_FAILED', - title: 'Failed to scan workspace directory', - operation: 'scan', - targetKind: 'workspace directory', - path: workspaceDir, - error: e - })) - } - } - - const workspace: Workspace = { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir) - }, - projects: aindexProjects - } - - return {workspace} - } -} diff --git a/cli/src/inputs/input-command.test.ts b/cli/src/inputs/input-command.test.ts deleted file mode 100644 index 49957086..00000000 --- a/cli/src/inputs/input-command.test.ts +++ /dev/null @@ -1,148 +0,0 @@ -import type {InputCapabilityContext} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger} from '../plugins/plugin-core' -import {CommandInputCapability} from './input-command' - -const legacySourceExtension = '.cn.mdx' - -function createContext(tempWorkspace: string): InputCapabilityContext { - const options = mergeConfig({workspaceDir: tempWorkspace}) - - return { - logger: createLogger('CommandInputCapabilityTest', 'error'), - fs, - path, - glob, - userConfigOptions: options, - dependencyContext: {} - } as InputCapabilityContext -} - -describe('command input plugin', () => { - it('prefers dist content, and dist also runs mdx-to-md compilation', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-test-')) - const aindexDir = path.join(tempWorkspace, 'aindex') - const srcDir = path.join(aindexDir, 'commands') - const distDir = path.join(aindexDir, 'dist', 'commands') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.mkdirSync(distDir, {recursive: true}) - - const srcFile = path.join(srcDir, 'demo.src.mdx') - const distFile = path.join(distDir, 'demo.mdx') - const srcContent = '---\ndescription: src\n---\nCommand source' - const distContent = '---\ndescription: dist\n---\nexport const x = 1\n\nCommand dist' - fs.writeFileSync(srcFile, srcContent, 'utf8') - fs.writeFileSync(distFile, distContent, 'utf8') - - const plugin = new CommandInputCapability() - const result = await plugin.collect(createContext(tempWorkspace)) - const [command] = result.commands ?? [] - - expect(result.commands?.length ?? 0).toBe(1) - expect(command?.commandName).toBe('demo') - expect(command?.content).toContain('Command dist') - expect(command?.content).not.toContain('Command source') - expect(command?.content).not.toContain('export const x = 1') - expect(command?.yamlFrontMatter?.description).toBe('dist') - expect(command?.rawMdxContent).toContain('export const x = 1') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('loads commands from dist when the source tree is missing', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-dist-only-test-')) - const aindexDir = path.join(tempWorkspace, 'aindex') - const distDir = path.join(aindexDir, 'dist', 'commands') - - try { - fs.mkdirSync(distDir, {recursive: true}) - fs.writeFileSync( - path.join(distDir, 'demo.mdx'), - '---\ndescription: dist only\n---\nDist only command', - 'utf8' - ) - - const plugin = new CommandInputCapability() - const result = await plugin.collect(createContext(tempWorkspace)) - - expect(result.commands?.length ?? 0).toBe(1) - expect(result.commands?.[0]?.commandName).toBe('demo') - expect(result.commands?.[0]?.content).toContain('Dist only command') - expect(result.commands?.[0]?.yamlFrontMatter?.description).toBe('dist only') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('fails hard when source exists without a compiled dist pair', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-source-only-test-')) - const srcDir = path.join(tempWorkspace, 'aindex', 'commands') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.writeFileSync( - path.join(srcDir, 'demo.src.mdx'), - '---\ndescription: source only\n---\nSource only command', - 'utf8' - ) - - const plugin = new CommandInputCapability() - await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Missing compiled dist prompt') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('ignores legacy cn command sources', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-legacy-test-')) - const srcDir = path.join(tempWorkspace, 'aindex', 'commands') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.writeFileSync( - path.join(srcDir, `demo${legacySourceExtension}`), - '---\ndescription: legacy\n---\nLegacy command', - 'utf8' - ) - - const plugin = new CommandInputCapability() - const result = await plugin.collect(createContext(tempWorkspace)) - - expect(result.commands ?? []).toHaveLength(0) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('rejects workspace as an unsupported command scope', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-workspace-scope-test-')) - const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'commands') - - try { - fs.mkdirSync(distDir, {recursive: true}) - fs.writeFileSync( - path.join(distDir, 'demo.mdx'), - '---\nscope: workspace\n---\nDist only command', - 'utf8' - ) - - const plugin = new CommandInputCapability() - await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Field "scope" must be "project" or "global"') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/input-command.ts b/cli/src/inputs/input-command.ts deleted file mode 100644 index 69026663..00000000 --- a/cli/src/inputs/input-command.ts +++ /dev/null @@ -1,152 +0,0 @@ -import type { - CommandPrompt, - CommandYAMLFrontMatter, - InputCapabilityContext, - InputCollectedContext, - Locale -} from '../plugins/plugin-core' -import {buildFileOperationDiagnostic} from '@/diagnostics' -import { - AbstractInputCapability, - createLocalizedPromptReader, - FilePathKind, - PromptKind, - SourceLocaleExtensions, - validateCommandMetadata - -} from '../plugins/plugin-core' - -export class CommandInputCapability extends AbstractInputCapability { - constructor() { - super('CommandInputCapability') - } - - private createCommandPrompt( - content: string, - _locale: Locale, - name: string, - distDir: string, - ctx: InputCapabilityContext, - metadata?: Record - ): CommandPrompt { - const {path} = ctx - - const normalizedName = name.replaceAll('\\', '/') // Normalize Windows backslashes to forward slashes - const slashIndex = normalizedName.indexOf('/') - const parentDirName = slashIndex !== -1 ? normalizedName.slice(0, slashIndex) : void 0 - const fileName = slashIndex !== -1 ? normalizedName.slice(slashIndex + 1) : normalizedName - - const baseName = fileName.replace(/\.mdx$/, '') - const underscoreIndex = baseName.indexOf('_') - const commandPrefix = parentDirName ?? (underscoreIndex === -1 ? void 0 : baseName.slice(0, Math.max(0, underscoreIndex))) - const commandName = parentDirName != null || underscoreIndex === -1 - ? baseName - : baseName.slice(Math.max(0, underscoreIndex + 1)) - - const filePath = path.join(distDir, `${name}.mdx`) - const entryName = `${name}.mdx` - const yamlFrontMatter = metadata as CommandYAMLFrontMatter | undefined - - const prompt: CommandPrompt = { - type: PromptKind.Command, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: entryName, - basePath: distDir, - getDirectoryName: () => entryName.replace(/\.mdx$/, ''), - getAbsolutePath: () => filePath - }, - ...commandPrefix != null && {commandPrefix}, - commandName - } as CommandPrompt - - if (yamlFrontMatter == null) return prompt - - const validation = validateCommandMetadata(yamlFrontMatter as Record, filePath) - if (!validation.valid) throw new Error(validation.errors.join('\n')) - - Object.assign(prompt, {yamlFrontMatter}) - if (yamlFrontMatter.seriName != null) Object.assign(prompt, {seriName: yamlFrontMatter.seriName}) - if (yamlFrontMatter.scope === 'global') Object.assign(prompt, {globalOnly: true}) - return prompt - } - - override async collect(ctx: InputCapabilityContext): Promise> { - const {userConfigOptions: options, logger, path, fs, globalScope} = ctx - const resolvedPaths = this.resolveBasePaths(options) - - const srcDir = this.resolveAindexPath(options.aindex.commands.src, resolvedPaths.aindexDir) - const distDir = this.resolveAindexPath(options.aindex.commands.dist, resolvedPaths.aindexDir) - - logger.debug('CommandInputCapability collecting', { - srcDir, - distDir, - aindexDir: resolvedPaths.aindexDir - }) - - const reader = createLocalizedPromptReader(fs, path, logger, globalScope) - - const {prompts: localizedCommands, errors} = await reader.readFlatFiles( - srcDir, - distDir, - { - kind: PromptKind.Command, - localeExtensions: SourceLocaleExtensions, - hydrateSourceContents: false, - isDirectoryStructure: false, - createPrompt: (content, locale, name, metadata) => this.createCommandPrompt( - content, - locale, - name, - distDir, - ctx, - metadata - ) - } - ) - - logger.debug('CommandInputCapability read complete', { - commandCount: localizedCommands.length, - errorCount: errors.length - }) - - for (const error of errors) { - logger.warn(buildFileOperationDiagnostic({ - code: 'COMMAND_PROMPT_READ_FAILED', - title: 'Failed to read command prompt', - operation: error.phase === 'scan' ? 'scan' : 'read', - targetKind: 'command prompt', - path: error.path, - error: error.error, - details: { - phase: error.phase - } - })) - } - - if (errors.length > 0) throw new Error(errors.map(error => error.error instanceof Error ? error.error.message : String(error.error)).join('\n')) - - const flatCommands: CommandPrompt[] = [] - for (const localized of localizedCommands) { - const distContent = localized.dist - if (distContent?.prompt == null) continue - - const {prompt: distPrompt, rawMdx} = distContent - flatCommands.push(rawMdx == null - ? distPrompt - : {...distPrompt, rawMdxContent: rawMdx}) - } - - logger.debug('CommandInputCapability flattened commands', { - count: flatCommands.length, - commands: flatCommands.map(c => c.commandName) - }) - - return { - commands: flatCommands - } - } -} diff --git a/cli/src/inputs/input-editorconfig.ts b/cli/src/inputs/input-editorconfig.ts deleted file mode 100644 index aaeda25f..00000000 --- a/cli/src/inputs/input-editorconfig.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext, ProjectIDEConfigFile} from '../plugins/plugin-core' -import {AbstractInputCapability, IDEKind} from '../plugins/plugin-core' -import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' - -export class EditorConfigInputCapability extends AbstractInputCapability { - constructor() { - super('EditorConfigInputCapability') - } - - collect(ctx: InputCapabilityContext): Partial { - const {userConfigOptions, fs} = ctx - const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) - - const editorConfigFiles: ProjectIDEConfigFile[] = [] - const file = readPublicIdeConfigDefinitionFile(IDEKind.EditorConfig, '.editorconfig', aindexDir, fs, { - command: ctx.runtimeCommand, - workspaceDir - }) - if (file != null) editorConfigFiles.push(file) - - return {editorConfigFiles} - } -} diff --git a/cli/src/inputs/input-git-exclude.ts b/cli/src/inputs/input-git-exclude.ts deleted file mode 100644 index 388aec02..00000000 --- a/cli/src/inputs/input-git-exclude.ts +++ /dev/null @@ -1,32 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext} from '../plugins/plugin-core' -import {AbstractInputCapability} from '../plugins/plugin-core' -import {PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, resolvePublicDefinitionPath} from '../public-config-paths' - -export class GitExcludeInputCapability extends AbstractInputCapability { - constructor() { - super('GitExcludeInputCapability') - } - - collect(ctx: InputCapabilityContext): Partial { - const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) - const filePath = resolvePublicDefinitionPath(aindexDir, PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, { - command: ctx.runtimeCommand, - workspaceDir - }) - - if (!ctx.fs.existsSync(filePath)) { - this.log.debug({action: 'collect', message: 'File not found', path: filePath}) - return {} - } - - const content = ctx.fs.readFileSync(filePath, 'utf8') - - if (content.length === 0) { - this.log.debug({action: 'collect', message: 'File is empty', path: filePath}) - return {} - } - - this.log.debug({action: 'collect', message: 'Loaded file content', path: filePath, length: content.length}) - return {shadowGitExclude: content} - } -} diff --git a/cli/src/inputs/input-gitignore.ts b/cli/src/inputs/input-gitignore.ts deleted file mode 100644 index 2dcd1cd3..00000000 --- a/cli/src/inputs/input-gitignore.ts +++ /dev/null @@ -1,32 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext} from '../plugins/plugin-core' -import {AbstractInputCapability} from '../plugins/plugin-core' -import {PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, resolvePublicDefinitionPath} from '../public-config-paths' - -export class GitIgnoreInputCapability extends AbstractInputCapability { - constructor() { - super('GitIgnoreInputCapability') - } - - collect(ctx: InputCapabilityContext): Partial { - const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) - const filePath = resolvePublicDefinitionPath(aindexDir, PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, { - command: ctx.runtimeCommand, - workspaceDir - }) - - if (!ctx.fs.existsSync(filePath)) { - this.log.debug({action: 'collect', message: 'File not found', path: filePath}) - return {} - } - - const content = ctx.fs.readFileSync(filePath, 'utf8') - - if (content.length === 0) { - this.log.debug({action: 'collect', message: 'File is empty', path: filePath}) - return {} - } - - this.log.debug({action: 'collect', message: 'Loaded file content', path: filePath, length: content.length}) - return {globalGitIgnore: content} - } -} diff --git a/cli/src/inputs/input-global-memory.ts b/cli/src/inputs/input-global-memory.ts deleted file mode 100644 index c23faf34..00000000 --- a/cli/src/inputs/input-global-memory.ts +++ /dev/null @@ -1,136 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext} from '../plugins/plugin-core' - -import process from 'node:process' - -import {CompilerDiagnosticError, ScopeError} from '@truenine/md-compiler/errors' -import {getGlobalConfigPath} from '@/ConfigLoader' -import { - buildConfigDiagnostic, - buildPathStateDiagnostic, - buildPromptCompilerDiagnostic, - diagnosticLines -} from '@/diagnostics' -import {getEffectiveHomeDir} from '@/runtime-environment' -import {AbstractInputCapability, FilePathKind, GlobalConfigDirectoryType, PromptKind} from '../plugins/plugin-core' -import {assertNoResidualModuleSyntax} from '../plugins/plugin-core/DistPromptGuards' -import {readPromptArtifact} from '../plugins/plugin-core/PromptArtifactCache' -import {formatPromptCompilerDiagnostic} from '../plugins/plugin-core/PromptCompilerDiagnostics' - -export class GlobalMemoryInputCapability extends AbstractInputCapability { - constructor() { - super('GlobalMemoryInputCapability') - } - - async collect(ctx: InputCapabilityContext): Promise> { - const {userConfigOptions: options, fs, path, globalScope} = ctx - const {aindexDir} = this.resolveBasePaths(options) - const globalConfigPath = getGlobalConfigPath() - const effectiveHomeDir = getEffectiveHomeDir() - - const globalMemoryFile = this.resolveAindexPath(options.aindex.globalPrompt.dist, aindexDir) - - if (!fs.existsSync(globalMemoryFile)) { - this.log.warn(buildPathStateDiagnostic({ - code: 'GLOBAL_MEMORY_PROMPT_MISSING', - title: 'Global memory prompt is missing', - path: globalMemoryFile, - expectedKind: 'compiled global memory prompt file', - actualState: 'path does not exist' - })) - return {} - } - - if (!fs.statSync(globalMemoryFile).isFile()) { - this.log.warn(buildPathStateDiagnostic({ - code: 'GLOBAL_MEMORY_PROMPT_NOT_FILE', - title: 'Global memory prompt path is not a file', - path: globalMemoryFile, - expectedKind: 'compiled global memory prompt file', - actualState: 'path exists but is not a regular file' - })) - return {} - } - - let compiledContent: string, - artifact: Awaited> - try { - artifact = await readPromptArtifact(globalMemoryFile, { - mode: 'dist', - globalScope - }) - compiledContent = artifact.content - assertNoResidualModuleSyntax(compiledContent, globalMemoryFile) - } - catch (e) { - if (e instanceof CompilerDiagnosticError) { - this.log.error(buildPromptCompilerDiagnostic({ - code: 'GLOBAL_MEMORY_PROMPT_COMPILE_FAILED', - title: 'Failed to compile global memory prompt', - diagnosticText: formatPromptCompilerDiagnostic(e, { - operation: 'Failed to compile global memory prompt.', - promptKind: 'global-memory', - logicalName: 'global-memory', - distPath: globalMemoryFile - }), - details: { - promptKind: 'global-memory', - distPath: globalMemoryFile - } - })) - if (e instanceof ScopeError) { - this.log.error(buildConfigDiagnostic({ - code: 'GLOBAL_MEMORY_SCOPE_VARIABLES_MISSING', - title: 'Global memory prompt references missing config variables', - reason: diagnosticLines( - `The global memory prompt uses scope variables that are not defined in "${globalConfigPath}".` - ), - configPath: globalConfigPath, - exactFix: diagnosticLines( - `Add the missing variables to "${globalConfigPath}" and rerun tnmsc.` - ), - possibleFixes: [ - diagnosticLines('If you reference `{profile.name}`, define `profile.name` in the config file.') - ], - details: { - promptPath: globalMemoryFile, - errorMessage: e.message - } - })) - } - process.exit(1) - } - throw e - } - - this.log.debug({action: 'collect', path: globalMemoryFile, contentLength: compiledContent.length}) - - return { - globalMemory: { - type: PromptKind.GlobalMemory, - content: compiledContent, - length: compiledContent.length, - filePathKind: FilePathKind.Relative, - ...artifact.parsed.rawFrontMatter != null && {rawFrontMatter: artifact.parsed.rawFrontMatter}, - markdownAst: artifact.parsed.markdownAst, - markdownContents: artifact.parsed.markdownContents, - dir: { - pathKind: FilePathKind.Relative, - path: path.basename(globalMemoryFile), - basePath: path.dirname(globalMemoryFile), - getDirectoryName: () => path.basename(globalMemoryFile), - getAbsolutePath: () => globalMemoryFile - }, - parentDirectoryPath: { - type: GlobalConfigDirectoryType.UserHome, - directory: { - pathKind: FilePathKind.Relative, - path: '', - basePath: effectiveHomeDir, - getDirectoryName: () => path.basename(effectiveHomeDir), - getAbsolutePath: () => effectiveHomeDir - } - } - } - } - } -} diff --git a/cli/src/inputs/input-jetbrains-config.ts b/cli/src/inputs/input-jetbrains-config.ts deleted file mode 100644 index cc8f5e88..00000000 --- a/cli/src/inputs/input-jetbrains-config.ts +++ /dev/null @@ -1,31 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext, ProjectIDEConfigFile} from '../plugins/plugin-core' -import {AbstractInputCapability, IDEKind} from '../plugins/plugin-core' -import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' - -export class JetBrainsConfigInputCapability extends AbstractInputCapability { - constructor() { - super('JetBrainsConfigInputCapability') - } - - collect(ctx: InputCapabilityContext): Partial { - const {userConfigOptions, fs} = ctx - const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) - - const files = [ - '.idea/codeStyles/Project.xml', - '.idea/codeStyles/codeStyleConfig.xml', - '.idea/.gitignore' - ] - const jetbrainsConfigFiles: ProjectIDEConfigFile[] = [] - - for (const relativePath of files) { - const file = readPublicIdeConfigDefinitionFile(IDEKind.IntellijIDEA, relativePath, aindexDir, fs, { - command: ctx.runtimeCommand, - workspaceDir - }) - if (file != null) jetbrainsConfigFiles.push(file) - } - - return {jetbrainsConfigFiles} - } -} diff --git a/cli/src/inputs/input-project-prompt.test.ts b/cli/src/inputs/input-project-prompt.test.ts deleted file mode 100644 index 6a5dd8a0..00000000 --- a/cli/src/inputs/input-project-prompt.test.ts +++ /dev/null @@ -1,176 +0,0 @@ -import type {InputCapabilityContext, Project, Workspace} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger, FilePathKind, WORKSPACE_ROOT_PROJECT_NAME} from '../plugins/plugin-core' -import {ProjectPromptInputCapability} from './input-project-prompt' - -function createProject( - tempWorkspace: string, - name: string, - overrides: Partial = {} -): Project { - return { - name, - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: name, - basePath: tempWorkspace, - getDirectoryName: () => name, - getAbsolutePath: () => path.join(tempWorkspace, name) - }, - ...overrides - } -} - -function createWorkspace(tempWorkspace: string, projects: readonly Project[] = [createProject(tempWorkspace, 'project-a')]): Workspace { - return { - directory: { - pathKind: FilePathKind.Absolute, - path: tempWorkspace, - getDirectoryName: () => path.basename(tempWorkspace), - getAbsolutePath: () => tempWorkspace - }, - projects: [...projects] - } -} - -function createContext(tempWorkspace: string, workspace: Workspace = createWorkspace(tempWorkspace)): InputCapabilityContext { - return { - logger: createLogger('ProjectPromptInputCapabilityTest', 'error'), - fs, - path, - glob, - userConfigOptions: mergeConfig({workspaceDir: tempWorkspace}), - dependencyContext: { - workspace - } - } as InputCapabilityContext -} - -describe('project prompt input plugin workspace prompt support', () => { - it('injects a synthetic workspace project from aindex/dist/workspace.mdx only', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-workspace-prompt-')) - const workspacePromptPath = path.join(tempWorkspace, 'aindex', 'dist', 'workspace.mdx') - - try { - fs.mkdirSync(path.dirname(workspacePromptPath), {recursive: true}) - fs.writeFileSync(workspacePromptPath, '---\ndescription: workspace\n---\nWorkspace prompt body', 'utf8') - - const plugin = new ProjectPromptInputCapability() - const result = await plugin.collect(createContext(tempWorkspace)) - const projects = result.workspace?.projects ?? [] - const workspaceProject = projects.find(project => project.isWorkspaceRootProject === true) - - expect(workspaceProject).toBeDefined() - expect(workspaceProject?.name).toBe(WORKSPACE_ROOT_PROJECT_NAME) - expect(workspaceProject?.rootMemoryPrompt?.content).toContain('Workspace prompt body') - expect(workspaceProject?.childMemoryPrompts).toBeUndefined() - expect(workspaceProject?.isPromptSourceProject).not.toBe(true) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('does not fall back to workspace/dist/workspace.mdx when aindex dist prompt is missing', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-workspace-prompt-fallback-')) - const wrongPromptPath = path.join(tempWorkspace, 'dist', 'workspace.mdx') - - try { - fs.mkdirSync(path.dirname(wrongPromptPath), {recursive: true}) - fs.writeFileSync(wrongPromptPath, 'Workspace prompt from the wrong place', 'utf8') - - const plugin = new ProjectPromptInputCapability() - const result = await plugin.collect(createContext(tempWorkspace)) - const projects = result.workspace?.projects ?? [] - - expect(projects.some(project => project.isWorkspaceRootProject === true)).toBe(false) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('inherits the prompt source project config for the synthetic workspace project', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-workspace-prompt-config-')) - const workspacePromptPath = path.join(tempWorkspace, 'aindex', 'dist', 'workspace.mdx') - const promptSourceProjectConfig = { - includeSeries: ['prompt-source-series'], - subSeries: { - skills: ['ship-*'] - } - } - - try { - fs.mkdirSync(path.dirname(workspacePromptPath), {recursive: true}) - fs.writeFileSync(workspacePromptPath, 'Workspace prompt body', 'utf8') - - const workspace = createWorkspace(tempWorkspace, [ - createProject(tempWorkspace, 'project-a', { - projectConfig: { - includeSeries: ['fallback-series'] - } - }), - createProject(tempWorkspace, 'project-b', { - isPromptSourceProject: true, - projectConfig: promptSourceProjectConfig - }) - ]) - - const plugin = new ProjectPromptInputCapability() - const result = await plugin.collect(createContext(tempWorkspace, workspace)) - const workspaceProject = result.workspace?.projects?.find(project => project.isWorkspaceRootProject === true) - - expect(workspaceProject?.projectConfig).toEqual(promptSourceProjectConfig) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('loads ext, arch, and softwares project prompts using the same agt.mdx workflow as app', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-project-prompt-series-')) - const extRoot = path.join(tempWorkspace, 'aindex', 'dist', 'ext', 'plugin-a') - const archRoot = path.join(tempWorkspace, 'aindex', 'dist', 'arch', 'system-a') - const softwareRoot = path.join(tempWorkspace, 'aindex', 'dist', 'softwares', 'tool-a') - - try { - fs.mkdirSync(path.join(extRoot, 'docs'), {recursive: true}) - fs.mkdirSync(path.join(archRoot, 'design'), {recursive: true}) - fs.mkdirSync(path.join(softwareRoot, 'manual'), {recursive: true}) - fs.writeFileSync(path.join(extRoot, 'agt.mdx'), 'Ext root prompt', 'utf8') - fs.writeFileSync(path.join(extRoot, 'docs', 'agt.mdx'), 'Ext child prompt', 'utf8') - fs.writeFileSync(path.join(archRoot, 'agt.mdx'), 'Arch root prompt', 'utf8') - fs.writeFileSync(path.join(archRoot, 'design', 'agt.mdx'), 'Arch child prompt', 'utf8') - fs.writeFileSync(path.join(softwareRoot, 'agt.mdx'), 'Software root prompt', 'utf8') - fs.writeFileSync(path.join(softwareRoot, 'manual', 'agt.mdx'), 'Software child prompt', 'utf8') - - const workspace = createWorkspace(tempWorkspace, [ - createProject(tempWorkspace, 'plugin-a', {promptSeries: 'ext'}), - createProject(tempWorkspace, 'system-a', {promptSeries: 'arch'}), - createProject(tempWorkspace, 'tool-a', {promptSeries: 'softwares'}) - ]) - - const plugin = new ProjectPromptInputCapability() - const result = await plugin.collect(createContext(tempWorkspace, workspace)) - const projects = result.workspace?.projects ?? [] - const extProject = projects.find(project => project.name === 'plugin-a') - const archProject = projects.find(project => project.name === 'system-a') - const softwareProject = projects.find(project => project.name === 'tool-a') - - expect(extProject?.rootMemoryPrompt?.content).toContain('Ext root prompt') - expect(extProject?.childMemoryPrompts?.[0]?.content).toContain('Ext child prompt') - expect(archProject?.rootMemoryPrompt?.content).toContain('Arch root prompt') - expect(archProject?.childMemoryPrompts?.[0]?.content).toContain('Arch child prompt') - expect(softwareProject?.rootMemoryPrompt?.content).toContain('Software root prompt') - expect(softwareProject?.childMemoryPrompts?.[0]?.content).toContain('Software child prompt') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/input-project-prompt.ts b/cli/src/inputs/input-project-prompt.ts deleted file mode 100644 index e5039491..00000000 --- a/cli/src/inputs/input-project-prompt.ts +++ /dev/null @@ -1,435 +0,0 @@ -import type { - InputCapabilityContext, - InputCollectedContext, - Project, - ProjectChildrenMemoryPrompt, - ProjectRootMemoryPrompt, - YAMLFrontMatter -} from '../plugins/plugin-core' - -import process from 'node:process' - -import {CompilerDiagnosticError, ScopeError} from '@truenine/md-compiler/errors' -import {resolveAindexProjectSeriesConfig, resolveAindexProjectSeriesConfigs} from '@/aindex-project-series' -import {getGlobalConfigPath} from '@/ConfigLoader' -import { - buildConfigDiagnostic, - buildFileOperationDiagnostic, - buildPromptCompilerDiagnostic, - diagnosticLines -} from '@/diagnostics' -import {AbstractInputCapability, FilePathKind, PromptKind, WORKSPACE_ROOT_PROJECT_NAME} from '../plugins/plugin-core' -import {assertNoResidualModuleSyntax} from '../plugins/plugin-core/DistPromptGuards' -import {readPromptArtifact} from '../plugins/plugin-core/PromptArtifactCache' -import {formatPromptCompilerDiagnostic} from '../plugins/plugin-core/PromptCompilerDiagnostics' - -const PROJECT_MEMORY_FILE = 'agt.mdx' -const SCAN_SKIP_DIRECTORIES: readonly string[] = ['node_modules', '.git'] as const - -export class ProjectPromptInputCapability extends AbstractInputCapability { - constructor() { - super('ProjectPromptInputCapability', ['AindexInputCapability']) - } - - async collect(ctx: InputCapabilityContext): Promise> { - const {dependencyContext, fs, userConfigOptions: options, path, globalScope} = ctx - const {aindexDir} = this.resolveBasePaths(options) - const workspacePromptPath = this.resolveAindexPath(options.aindex.workspacePrompt.dist, aindexDir) - - const dependencyWorkspace = dependencyContext.workspace - if (dependencyWorkspace == null) { - this.log.info('No workspace found in dependency context, skipping project prompt enhancement') - return {} - } - - const projects = dependencyWorkspace.projects ?? [] - - const enhancedProjects = await Promise.all(projects.map(async project => { - const projectName = project.name - if (projectName == null) return project - if (project.isWorkspaceRootProject === true) return project - - const seriesConfigs = project.promptSeries != null - ? [resolveAindexProjectSeriesConfig(options, project.promptSeries)] - : resolveAindexProjectSeriesConfigs(options) - const matchingSeries = seriesConfigs.find(series => { - const shadowProjectPath = path.join(aindexDir, series.dist, projectName) - return fs.existsSync(shadowProjectPath) && fs.statSync(shadowProjectPath).isDirectory() - }) - if (matchingSeries == null) return project - - const shadowProjectPath = path.join(aindexDir, matchingSeries.dist, projectName) - - const targetProjectPath = project.dirFromWorkspacePath?.getAbsolutePath() - - const rootMemoryPrompt = await this.readRootMemoryPrompt(ctx, shadowProjectPath, globalScope) - const childMemoryPrompts = targetProjectPath != null - ? await this.scanChildMemoryPrompts(ctx, shadowProjectPath, targetProjectPath, globalScope) - : [] - - return { - ...project, - ...project.promptSeries == null ? {promptSeries: matchingSeries.name} : {}, - ...rootMemoryPrompt != null && {rootMemoryPrompt}, - ...childMemoryPrompts.length > 0 && {childMemoryPrompts} - } - })) - - const workspaceRootProject = await this.readWorkspaceRootProjectPrompt( - ctx, - workspacePromptPath, - globalScope, - this.resolveWorkspaceRootProjectConfig(projects) - ) - - return { - workspace: { - directory: dependencyWorkspace.directory, - projects: workspaceRootProject == null - ? enhancedProjects - : [...enhancedProjects, workspaceRootProject] - } - } - } - - private async readWorkspaceRootProjectPrompt( - ctx: InputCapabilityContext, - filePath: string, - globalScope: InputCapabilityContext['globalScope'], - projectConfig: Project['projectConfig'] - ): Promise { - const {fs, logger} = ctx - - if (!fs.existsSync(filePath) || !fs.statSync(filePath).isFile()) return - - try { - let artifact: Awaited> - try { - artifact = await readPromptArtifact(filePath, { - mode: 'dist', - globalScope - }) - assertNoResidualModuleSyntax(artifact.content, filePath) - } - catch (e) { - if (e instanceof CompilerDiagnosticError) { - logger.error(buildPromptCompilerDiagnostic({ - code: 'WORKSPACE_ROOT_MEMORY_PROMPT_COMPILE_FAILED', - title: 'Failed to compile workspace root memory prompt', - diagnosticText: formatPromptCompilerDiagnostic(e, { - operation: 'Failed to compile workspace root memory prompt.', - promptKind: 'workspace-root-memory', - logicalName: filePath, - distPath: filePath - }), - details: { - promptKind: 'workspace-root-memory', - distPath: filePath - } - })) - if (e instanceof ScopeError) { - const globalConfigPath = getGlobalConfigPath() - logger.error(buildConfigDiagnostic({ - code: 'WORKSPACE_ROOT_MEMORY_SCOPE_VARIABLES_MISSING', - title: 'Workspace root memory prompt references missing config variables', - reason: diagnosticLines( - `The workspace root memory prompt uses scope variables that are not defined in "${globalConfigPath}".` - ), - configPath: globalConfigPath, - exactFix: diagnosticLines( - `Define the missing variables in "${globalConfigPath}" and rerun tnmsc.` - ), - details: { - promptPath: filePath, - errorMessage: e.message - } - })) - } - process.exit(1) - } - throw e - } - - const rootMemoryPrompt: ProjectRootMemoryPrompt = { - type: PromptKind.ProjectRootMemory, - content: artifact.content, - length: artifact.content.length, - filePathKind: FilePathKind.Relative, - ...artifact.parsed.yamlFrontMatter != null && {yamlFrontMatter: artifact.parsed.yamlFrontMatter as YAMLFrontMatter}, - ...artifact.parsed.rawFrontMatter != null && {rawFrontMatter: artifact.parsed.rawFrontMatter}, - markdownAst: artifact.parsed.markdownAst, - markdownContents: artifact.parsed.markdownContents, - dir: { - pathKind: FilePathKind.Root, - path: '', - getDirectoryName: () => '' - } - } - - return { - name: WORKSPACE_ROOT_PROJECT_NAME, - isWorkspaceRootProject: true, - ...projectConfig != null && {projectConfig}, - rootMemoryPrompt - } - } - catch (e) { - logger.error(buildFileOperationDiagnostic({ - code: 'WORKSPACE_ROOT_MEMORY_PROMPT_READ_FAILED', - title: 'Failed to read workspace root memory prompt', - operation: 'read', - targetKind: 'workspace root memory prompt', - path: filePath, - error: e - })) - return void 0 - } - } - - private resolveWorkspaceRootProjectConfig(projects: readonly Project[]): Project['projectConfig'] { - const concreteProjects = projects.filter(project => project.isWorkspaceRootProject !== true) - const promptSourceProject = concreteProjects.find(project => project.isPromptSourceProject === true) - return promptSourceProject?.projectConfig ?? concreteProjects[0]?.projectConfig - } - - private async readRootMemoryPrompt( - ctx: InputCapabilityContext, - projectPath: string, - globalScope: InputCapabilityContext['globalScope'] - ): Promise { - const {fs, path, logger} = ctx - const filePath = path.join(projectPath, PROJECT_MEMORY_FILE) - - if (!fs.existsSync(filePath) || !fs.statSync(filePath).isFile()) return - - try { - let artifact: Awaited> - try { - artifact = await readPromptArtifact(filePath, { - mode: 'dist', - globalScope - }) - assertNoResidualModuleSyntax(artifact.content, filePath) - } - catch (e) { - if (e instanceof CompilerDiagnosticError) { - logger.error(buildPromptCompilerDiagnostic({ - code: 'PROJECT_ROOT_MEMORY_PROMPT_COMPILE_FAILED', - title: 'Failed to compile project root memory prompt', - diagnosticText: formatPromptCompilerDiagnostic(e, { - operation: 'Failed to compile project root memory prompt.', - promptKind: 'project-root-memory', - logicalName: filePath, - distPath: filePath - }), - details: { - promptKind: 'project-root-memory', - distPath: filePath - } - })) - if (e instanceof ScopeError) { - const globalConfigPath = getGlobalConfigPath() - logger.error(buildConfigDiagnostic({ - code: 'PROJECT_ROOT_MEMORY_SCOPE_VARIABLES_MISSING', - title: 'Project root memory prompt references missing config variables', - reason: diagnosticLines( - `The project root memory prompt uses scope variables that are not defined in "${globalConfigPath}".` - ), - configPath: globalConfigPath, - exactFix: diagnosticLines( - `Define the missing variables in "${globalConfigPath}" and rerun tnmsc.` - ), - details: { - promptPath: filePath, - errorMessage: e.message - } - })) - } - process.exit(1) - } - throw e - } - - return { - type: PromptKind.ProjectRootMemory, - content: artifact.content, - length: artifact.content.length, - filePathKind: FilePathKind.Relative, - ...artifact.parsed.yamlFrontMatter != null && {yamlFrontMatter: artifact.parsed.yamlFrontMatter as YAMLFrontMatter}, - ...artifact.parsed.rawFrontMatter != null && {rawFrontMatter: artifact.parsed.rawFrontMatter}, - markdownAst: artifact.parsed.markdownAst, - markdownContents: artifact.parsed.markdownContents, - dir: { - pathKind: FilePathKind.Root, - path: '', - getDirectoryName: () => '' - } - } - } - catch (e) { - logger.error(buildFileOperationDiagnostic({ - code: 'PROJECT_ROOT_MEMORY_PROMPT_READ_FAILED', - title: 'Failed to read project root memory prompt', - operation: 'read', - targetKind: 'project root memory prompt', - path: filePath, - error: e - })) - return void 0 - } - } - - private async scanChildMemoryPrompts( - ctx: InputCapabilityContext, - shadowProjectPath: string, - targetProjectPath: string, - globalScope: InputCapabilityContext['globalScope'] - ): Promise { - const {logger} = ctx - const prompts: ProjectChildrenMemoryPrompt[] = [] - - try { - await this.scanDirectoryRecursive(ctx, shadowProjectPath, shadowProjectPath, targetProjectPath, prompts, globalScope) - } - catch (e) { - logger.error(buildFileOperationDiagnostic({ - code: 'PROJECT_CHILD_MEMORY_SCAN_FAILED', - title: 'Failed to scan project child memory prompts', - operation: 'scan', - targetKind: 'project child memory prompt directory', - path: shadowProjectPath, - error: e - })) - } - - return prompts - } - - private async scanDirectoryRecursive( - ctx: InputCapabilityContext, - shadowProjectPath: string, - currentPath: string, - targetProjectPath: string, - prompts: ProjectChildrenMemoryPrompt[], - globalScope: InputCapabilityContext['globalScope'] - ): Promise { - const {fs, path} = ctx - - const entries = fs.readdirSync(currentPath, {withFileTypes: true}) - for (const entry of entries) { - if (!entry.isDirectory()) continue - - if (SCAN_SKIP_DIRECTORIES.includes(entry.name)) continue - - const childDir = path.join(currentPath, entry.name) - const memoryFile = path.join(childDir, PROJECT_MEMORY_FILE) - - if (Boolean(fs.existsSync(memoryFile)) && Boolean(fs.statSync(memoryFile).isFile())) { - const prompt = await this.readChildMemoryPrompt(ctx, shadowProjectPath, childDir, targetProjectPath, globalScope) - if (prompt != null) prompts.push(prompt) - } - - await this.scanDirectoryRecursive(ctx, shadowProjectPath, childDir, targetProjectPath, prompts, globalScope) - } - } - - private async readChildMemoryPrompt( - ctx: InputCapabilityContext, - shadowProjectPath: string, - shadowChildDir: string, - targetProjectPath: string, - globalScope: InputCapabilityContext['globalScope'] - ): Promise { - const {path, logger} = ctx - const filePath = path.join(shadowChildDir, PROJECT_MEMORY_FILE) - - try { - let artifact: Awaited> - try { - artifact = await readPromptArtifact(filePath, { - mode: 'dist', - globalScope - }) - assertNoResidualModuleSyntax(artifact.content, filePath) - } - catch (e) { - if (e instanceof CompilerDiagnosticError) { - logger.error(buildPromptCompilerDiagnostic({ - code: 'PROJECT_CHILD_MEMORY_PROMPT_COMPILE_FAILED', - title: 'Failed to compile project child memory prompt', - diagnosticText: formatPromptCompilerDiagnostic(e, { - operation: 'Failed to compile project child memory prompt.', - promptKind: 'project-child-memory', - logicalName: filePath, - distPath: filePath - }), - details: { - promptKind: 'project-child-memory', - distPath: filePath - } - })) - if (e instanceof ScopeError) { - const globalConfigPath = getGlobalConfigPath() - logger.error(buildConfigDiagnostic({ - code: 'PROJECT_CHILD_MEMORY_SCOPE_VARIABLES_MISSING', - title: 'Project child memory prompt references missing config variables', - reason: diagnosticLines( - `The project child memory prompt uses scope variables that are not defined in "${globalConfigPath}".` - ), - configPath: globalConfigPath, - exactFix: diagnosticLines( - `Define the missing variables in "${globalConfigPath}" and rerun tnmsc.` - ), - details: { - promptPath: filePath, - errorMessage: e.message - } - })) - } - process.exit(1) - } - throw e - } - - const relativePath = path.relative(shadowProjectPath, shadowChildDir) - const targetChildDir = path.join(targetProjectPath, relativePath) - const dirName = path.basename(shadowChildDir) - - return { - type: PromptKind.ProjectChildrenMemory, - content: artifact.content, - length: artifact.content.length, - filePathKind: FilePathKind.Relative, - ...artifact.parsed.yamlFrontMatter != null && {yamlFrontMatter: artifact.parsed.yamlFrontMatter as YAMLFrontMatter}, - ...artifact.parsed.rawFrontMatter != null && {rawFrontMatter: artifact.parsed.rawFrontMatter}, - markdownAst: artifact.parsed.markdownAst, - markdownContents: artifact.parsed.markdownContents, - dir: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: targetProjectPath, - getDirectoryName: () => dirName, - getAbsolutePath: () => targetChildDir - }, - workingChildDirectoryPath: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: targetProjectPath, - getDirectoryName: () => dirName, - getAbsolutePath: () => targetChildDir - } - } - } - catch (e) { - logger.error(buildFileOperationDiagnostic({ - code: 'PROJECT_CHILD_MEMORY_PROMPT_READ_FAILED', - title: 'Failed to read project child memory prompt', - operation: 'read', - targetKind: 'project child memory prompt', - path: filePath, - error: e - })) - return void 0 - } - } -} diff --git a/cli/src/inputs/input-public-config.test.ts b/cli/src/inputs/input-public-config.test.ts deleted file mode 100644 index 08c658eb..00000000 --- a/cli/src/inputs/input-public-config.test.ts +++ /dev/null @@ -1,450 +0,0 @@ -import type {InputCapabilityContext} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger} from '../plugins/plugin-core' -import { - AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS, - PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, - PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, - resolvePublicDefinitionPath -} from '../public-config-paths' -import {EditorConfigInputCapability} from './input-editorconfig' -import {GitExcludeInputCapability} from './input-git-exclude' -import {GitIgnoreInputCapability} from './input-gitignore' -import {JetBrainsConfigInputCapability} from './input-jetbrains-config' -import {AIAgentIgnoreInputCapability} from './input-shared-ignore' -import {VSCodeConfigInputCapability} from './input-vscode-config' -import {ZedConfigInputCapability} from './input-zed-config' - -interface TestContextOptions { - readonly aindexDir?: string - readonly runtimeCommand?: InputCapabilityContext['runtimeCommand'] -} - -function createContext( - tempWorkspace: string, - options?: TestContextOptions -): InputCapabilityContext { - const mergedOptions = mergeConfig({ - workspaceDir: tempWorkspace, - ...(options?.aindexDir != null - ? { - aindex: { - dir: options.aindexDir - } - } - : {}) - }) - - return { - logger: createLogger('PublicConfigInputCapabilityTest', 'error'), - fs, - path, - glob, - userConfigOptions: mergedOptions, - dependencyContext: {}, - ...(options?.runtimeCommand != null - ? {runtimeCommand: options.runtimeCommand} - : {}) - } as InputCapabilityContext -} - -function writePublicDefinition( - tempWorkspace: string, - targetRelativePath: string, - content: string -): string { - const filePath = resolvePublicDefinitionPath( - path.join(tempWorkspace, 'aindex'), - targetRelativePath - ) - fs.mkdirSync(path.dirname(filePath), {recursive: true}) - fs.writeFileSync(filePath, content, 'utf8') - return filePath -} - -function writePublicProxy(tempWorkspace: string, source: string): string { - return writePublicDefinition(tempWorkspace, 'proxy.ts', source) -} - -describe('public config input plugins', () => { - it('reads config definitions from target-relative public paths', () => { - const tempWorkspace = fs.mkdtempSync( - path.join(os.tmpdir(), 'tnmsc-public-config-input-') - ) - - try { - const aindexDir = path.join(tempWorkspace, 'aindex') - const gitIgnorePath = writePublicDefinition( - tempWorkspace, - PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, - 'dist/\n' - ) - const gitExcludePath = writePublicDefinition( - tempWorkspace, - PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, - '.idea/\n' - ) - const editorConfigPath = writePublicDefinition( - tempWorkspace, - '.editorconfig', - 'root = true\n' - ) - writePublicDefinition( - tempWorkspace, - '.vscode/settings.json', - '{"editor.tabSize": 2}\n' - ) - writePublicDefinition( - tempWorkspace, - '.vscode/extensions.json', - '{"recommendations":["foo.bar"]}\n' - ) - writePublicDefinition( - tempWorkspace, - '.zed/settings.json', - '{"tab_size": 2}\n' - ) - writePublicDefinition( - tempWorkspace, - '.idea/.gitignore', - '/workspace.xml\n' - ) - writePublicDefinition( - tempWorkspace, - '.idea/codeStyles/Project.xml', - '\n' - ) - writePublicDefinition( - tempWorkspace, - '.idea/codeStyles/codeStyleConfig.xml', - '\n' - ) - - for (const fileName of AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS) - { writePublicDefinition(tempWorkspace, fileName, `${fileName}\n`) } - - const ctx = createContext(tempWorkspace) - const gitIgnore = new GitIgnoreInputCapability().collect(ctx) - const gitExclude = new GitExcludeInputCapability().collect(ctx) - const editorConfig = new EditorConfigInputCapability().collect(ctx) - const vscode = new VSCodeConfigInputCapability().collect(ctx) - const zed = new ZedConfigInputCapability().collect(ctx) - const jetbrains = new JetBrainsConfigInputCapability().collect(ctx) - const ignoreFiles = new AIAgentIgnoreInputCapability().collect(ctx) - - expect(gitIgnore.globalGitIgnore).toBe('dist/\n') - expect(gitExclude.shadowGitExclude).toBe('.idea/\n') - expect(editorConfig.editorConfigFiles?.[0]?.dir.path).toBe( - editorConfigPath - ) - expect(vscode.vscodeConfigFiles?.map(file => file.dir.path)).toEqual([ - path.join(aindexDir, 'public', '.vscode', 'settings.json'), - path.join(aindexDir, 'public', '.vscode', 'extensions.json') - ]) - expect(zed.zedConfigFiles?.map(file => file.dir.path)).toEqual([ - path.join(aindexDir, 'public', '.zed', 'settings.json') - ]) - expect( - jetbrains.jetbrainsConfigFiles?.map(file => file.dir.path) - ).toEqual([ - path.join(aindexDir, 'public', '.idea', 'codeStyles', 'Project.xml'), - path.join( - aindexDir, - 'public', - '.idea', - 'codeStyles', - 'codeStyleConfig.xml' - ), - path.join(aindexDir, 'public', '.idea', '.gitignore') - ]) - expect( - ignoreFiles.aiAgentIgnoreConfigFiles?.map(file => file.fileName) - ).toEqual([...AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS]) - expect( - ignoreFiles.aiAgentIgnoreConfigFiles?.map(file => file.sourcePath) - ).toEqual( - AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS.map(fileName => - resolvePublicDefinitionPath(aindexDir, fileName)) - ) - expect(gitIgnorePath).toBe(path.join(aindexDir, 'public', '.gitignore')) - expect(gitExcludePath).toBe( - path.join(aindexDir, 'public', '.git', 'info', 'exclude') - ) - } finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('does not read legacy definition locations after the hard cut', () => { - const tempWorkspace = fs.mkdtempSync( - path.join(os.tmpdir(), 'tnmsc-public-config-legacy-') - ) - - try { - const aindexDir = path.join(tempWorkspace, 'aindex') - fs.mkdirSync(path.join(aindexDir, 'public'), {recursive: true}) - fs.mkdirSync(path.join(aindexDir, '.vscode'), {recursive: true}) - fs.mkdirSync(path.join(aindexDir, '.zed'), {recursive: true}) - fs.mkdirSync(path.join(aindexDir, '.idea', 'codeStyles'), { - recursive: true - }) - - fs.writeFileSync( - path.join(aindexDir, 'public', 'gitignore'), - 'legacy gitignore\n', - 'utf8' - ) - fs.writeFileSync( - path.join(aindexDir, 'public', 'exclude'), - 'legacy exclude\n', - 'utf8' - ) - fs.writeFileSync( - path.join(aindexDir, '.editorconfig'), - 'root = true\n', - 'utf8' - ) - fs.writeFileSync( - path.join(aindexDir, '.vscode', 'settings.json'), - '{}\n', - 'utf8' - ) - fs.writeFileSync( - path.join(aindexDir, '.vscode', 'extensions.json'), - '{}\n', - 'utf8' - ) - fs.writeFileSync( - path.join(aindexDir, '.zed', 'settings.json'), - '{}\n', - 'utf8' - ) - fs.writeFileSync( - path.join(aindexDir, '.idea', '.gitignore'), - '/workspace.xml\n', - 'utf8' - ) - fs.writeFileSync( - path.join(aindexDir, '.idea', 'codeStyles', 'Project.xml'), - '\n', - 'utf8' - ) - fs.writeFileSync( - path.join(aindexDir, '.idea', 'codeStyles', 'codeStyleConfig.xml'), - '\n', - 'utf8' - ) - fs.writeFileSync( - path.join(aindexDir, '.cursorignore'), - '.cursor/\n', - 'utf8' - ) - - const ctx = createContext(tempWorkspace) - - expect( - new GitIgnoreInputCapability().collect(ctx).globalGitIgnore - ).toBeUndefined() - expect( - new GitExcludeInputCapability().collect(ctx).shadowGitExclude - ).toBeUndefined() - expect( - new EditorConfigInputCapability().collect(ctx).editorConfigFiles ?? [] - ).toHaveLength(0) - expect( - new VSCodeConfigInputCapability().collect(ctx).vscodeConfigFiles ?? [] - ).toHaveLength(0) - expect( - new ZedConfigInputCapability().collect(ctx).zedConfigFiles ?? [] - ).toHaveLength(0) - expect( - new JetBrainsConfigInputCapability().collect(ctx).jetbrainsConfigFiles ?? [] - ).toHaveLength(0) - expect( - new AIAgentIgnoreInputCapability().collect(ctx).aiAgentIgnoreConfigFiles ?? [] - ).toHaveLength(0) - } finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('routes public definitions through public/proxy.ts transparently', () => { - const tempWorkspace = fs.mkdtempSync( - path.join(os.tmpdir(), 'tnmsc-public-config-proxy-') - ) - - try { - const aindexDir = path.join(tempWorkspace, 'aindex') - writePublicProxy( - tempWorkspace, - [ - 'export default (logicalPath) => {', - ' const normalizedPath = logicalPath.replaceAll("\\\\", "/")', - ' if (normalizedPath.startsWith(".git/")) return normalizedPath.replace(/^\\.git\\//, "____.git/")', - ' if (normalizedPath === ".idea/.gitignore") return ".idea/.gitignore"', - ' if (normalizedPath.startsWith(".idea/")) return normalizedPath', - ' if (!normalizedPath.startsWith(".")) return normalizedPath', - ' return normalizedPath.replace(/^\\.([^/\\\\]+)/, "____$1")', - '}', - '' - ].join('\n') - ) - - const gitIgnorePath = writePublicDefinition( - tempWorkspace, - PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, - 'dist/\n' - ) - const gitExcludePath = writePublicDefinition( - tempWorkspace, - PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, - '.idea/\n' - ) - const editorConfigPath = writePublicDefinition( - tempWorkspace, - '.editorconfig', - 'root = true\n' - ) - const vscodeSettingsPath = writePublicDefinition( - tempWorkspace, - '.vscode/settings.json', - '{"editor.tabSize": 2}\n' - ) - const vscodeExtensionsPath = writePublicDefinition( - tempWorkspace, - '.vscode/extensions.json', - '{"recommendations":["foo.bar"]}\n' - ) - const zedSettingsPath = writePublicDefinition( - tempWorkspace, - '.zed/settings.json', - '{"tab_size": 2}\n' - ) - const ideaGitIgnorePath = writePublicDefinition( - tempWorkspace, - '.idea/.gitignore', - '/workspace.xml\n' - ) - const ideaProjectPath = writePublicDefinition( - tempWorkspace, - '.idea/codeStyles/Project.xml', - '\n' - ) - const ideaCodeStyleConfigPath = writePublicDefinition( - tempWorkspace, - '.idea/codeStyles/codeStyleConfig.xml', - '\n' - ) - - for (const fileName of AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS) - { writePublicDefinition(tempWorkspace, fileName, `${fileName}\n`) } - - const ctx = createContext(tempWorkspace) - const gitIgnore = new GitIgnoreInputCapability().collect(ctx) - const gitExclude = new GitExcludeInputCapability().collect(ctx) - const editorConfig = new EditorConfigInputCapability().collect(ctx) - const vscode = new VSCodeConfigInputCapability().collect(ctx) - const zed = new ZedConfigInputCapability().collect(ctx) - const jetbrains = new JetBrainsConfigInputCapability().collect(ctx) - const ignoreFiles = new AIAgentIgnoreInputCapability().collect(ctx) - - expect(gitIgnore.globalGitIgnore).toBe('dist/\n') - expect(gitExclude.shadowGitExclude).toBe('.idea/\n') - expect(editorConfig.editorConfigFiles?.[0]?.dir.path).toBe( - editorConfigPath - ) - expect(vscode.vscodeConfigFiles?.map(file => file.dir.path)).toEqual([ - vscodeSettingsPath, - vscodeExtensionsPath - ]) - expect(zed.zedConfigFiles?.map(file => file.dir.path)).toEqual([ - zedSettingsPath - ]) - expect( - jetbrains.jetbrainsConfigFiles?.map(file => file.dir.path) - ).toEqual([ideaProjectPath, ideaCodeStyleConfigPath, ideaGitIgnorePath]) - expect( - ignoreFiles.aiAgentIgnoreConfigFiles?.map(file => file.sourcePath) - ).toEqual( - AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS.map(fileName => - resolvePublicDefinitionPath(aindexDir, fileName)) - ) - expect(gitIgnorePath).toBe( - path.join(aindexDir, 'public', '____gitignore') - ) - expect(gitExcludePath).toBe( - path.join(aindexDir, 'public', '____.git', 'info', 'exclude') - ) - expect(editorConfigPath).toBe( - path.join(aindexDir, 'public', '____editorconfig') - ) - expect(vscodeSettingsPath).toBe( - path.join(aindexDir, 'public', '____vscode', 'settings.json') - ) - expect(vscodeExtensionsPath).toBe( - path.join(aindexDir, 'public', '____vscode', 'extensions.json') - ) - expect(zedSettingsPath).toBe( - path.join(aindexDir, 'public', '____zed', 'settings.json') - ) - expect(ideaGitIgnorePath).toBe( - path.join(aindexDir, 'public', '.idea', '.gitignore') - ) - expect(ideaProjectPath).toBe( - path.join(aindexDir, 'public', '.idea', 'codeStyles', 'Project.xml') - ) - expect(ideaCodeStyleConfigPath).toBe( - path.join( - aindexDir, - 'public', - '.idea', - 'codeStyles', - 'codeStyleConfig.xml' - ) - ) - } finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('passes the configured workspace root into public/proxy.ts', () => { - const tempWorkspace = fs.mkdtempSync( - path.join(os.tmpdir(), 'tnmsc-public-config-nested-aindex-') - ) - - try { - const aindexDir = path.join(tempWorkspace, 'config', 'aindex') - const publicDir = path.join(aindexDir, 'public') - fs.mkdirSync(path.join(publicDir, 'expected'), {recursive: true}) - fs.writeFileSync( - path.join(publicDir, 'proxy.ts'), - [ - 'export default (_logicalPath, ctx) => {', - ` return ctx.workspaceDir === ${JSON.stringify(tempWorkspace)} && ctx.cwd === ${JSON.stringify(tempWorkspace)}`, - ' ? "expected/.gitignore"', - ' : "unexpected/.gitignore"', - '}', - '' - ].join('\n'), - 'utf8' - ) - fs.writeFileSync( - path.join(publicDir, 'expected', '.gitignore'), - 'dist/\n', - 'utf8' - ) - - const ctx = createContext(tempWorkspace, {aindexDir: 'config/aindex'}) - const gitIgnore = new GitIgnoreInputCapability().collect(ctx) - - expect(gitIgnore.globalGitIgnore).toBe('dist/\n') - } finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/input-readme.test.ts b/cli/src/inputs/input-readme.test.ts deleted file mode 100644 index 9b4eec89..00000000 --- a/cli/src/inputs/input-readme.test.ts +++ /dev/null @@ -1,49 +0,0 @@ -import type {InputCapabilityContext} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it, vi} from 'vitest' -import {mergeConfig} from '../config' -import {ReadmeMdInputCapability} from './input-readme' - -function createContext(tempWorkspace: string, logger: InputCapabilityContext['logger']): InputCapabilityContext { - return { - logger, - fs, - path, - glob, - userConfigOptions: mergeConfig({workspaceDir: tempWorkspace}), - dependencyContext: {} - } as InputCapabilityContext -} - -describe('readme input capability project series validation', () => { - it('fails fast when app, ext, arch, and softwares reuse the same project name', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-readme-series-conflict-')) - const error = vi.fn() - const logger = { - error, - warn: vi.fn(), - info: vi.fn(), - debug: vi.fn(), - trace: vi.fn(), - fatal: vi.fn() - } as InputCapabilityContext['logger'] - - try { - fs.mkdirSync(path.join(tempWorkspace, 'aindex', 'app', 'project-a'), {recursive: true}) - fs.mkdirSync(path.join(tempWorkspace, 'aindex', 'softwares', 'project-a'), {recursive: true}) - - await expect(new ReadmeMdInputCapability().collect(createContext(tempWorkspace, logger))) - .rejects - .toThrow('Readme project series name conflict') - expect(error).toHaveBeenCalledWith(expect.objectContaining({ - code: 'README_PROJECT_SERIES_NAME_CONFLICT' - })) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/input-readme.ts b/cli/src/inputs/input-readme.ts deleted file mode 100644 index d1fcb11d..00000000 --- a/cli/src/inputs/input-readme.ts +++ /dev/null @@ -1,270 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext, ReadmeFileKind, ReadmePrompt, RelativePath} from '../plugins/plugin-core' - -import process from 'node:process' - -import {CompilerDiagnosticError, ScopeError} from '@truenine/md-compiler/errors' -import { - collectAindexProjectSeriesProjectNameConflicts, - resolveAindexProjectSeriesConfigs -} from '@/aindex-project-series' -import {getGlobalConfigPath} from '@/ConfigLoader' -import { - buildConfigDiagnostic, - buildFileOperationDiagnostic, - buildPromptCompilerDiagnostic, - diagnosticLines -} from '@/diagnostics' -import {AbstractInputCapability, FilePathKind, PromptKind, README_FILE_KIND_MAP} from '../plugins/plugin-core' -import {assertNoResidualModuleSyntax} from '../plugins/plugin-core/DistPromptGuards' -import {readPromptArtifact} from '../plugins/plugin-core/PromptArtifactCache' -import {formatPromptCompilerDiagnostic} from '../plugins/plugin-core/PromptCompilerDiagnostics' - -const ALL_FILE_KINDS = Object.entries(README_FILE_KIND_MAP) as [ReadmeFileKind, {src: string, out: string}][] - -export class ReadmeMdInputCapability extends AbstractInputCapability { - constructor() { - super('ReadmeMdInputCapability', ['AindexInputCapability']) - } - - async collect(ctx: InputCapabilityContext): Promise> { - const {userConfigOptions: options, logger, fs, path, globalScope} = ctx - const {workspaceDir, aindexDir} = this.resolveBasePaths(options) - const readmePrompts: ReadmePrompt[] = [] - const projectSeries = resolveAindexProjectSeriesConfigs(options) - const projectRefs = projectSeries.flatMap(series => { - const seriesSourceDir = this.resolveAindexPath(series.src, aindexDir) - if (!(fs.existsSync(seriesSourceDir) && fs.statSync(seriesSourceDir).isDirectory())) return [] - - return fs - .readdirSync(seriesSourceDir, {withFileTypes: true}) - .filter(entry => entry.isDirectory()) - .map(entry => ({ - projectName: entry.name, - seriesName: series.name, - seriesDir: path.join(seriesSourceDir, entry.name) - })) - }) - const conflicts = collectAindexProjectSeriesProjectNameConflicts(projectRefs) - if (conflicts.length > 0) { - logger.error(buildConfigDiagnostic({ - code: 'README_PROJECT_SERIES_NAME_CONFLICT', - title: 'Readme project names must be unique across app, ext, arch, and softwares', - reason: diagnosticLines( - 'Readme-family outputs target bare workspace project directories, so app/ext/arch/softwares cannot reuse the same project directory name.', - `Conflicting project names: ${conflicts.map(conflict => conflict.projectName).join(', ')}` - ), - exactFix: diagnosticLines( - 'Rename the conflicting project directory in one of the app/ext/arch/softwares source trees and rerun tnmsc.' - ), - possibleFixes: conflicts.map(conflict => diagnosticLines( - `"${conflict.projectName}" is currently declared in: ${conflict.refs.map(ref => `${ref.seriesName} (${ref.seriesDir})`).join(', ')}` - )), - details: { - aindexDir, - conflicts: conflicts.map(conflict => ({ - projectName: conflict.projectName, - refs: conflict.refs.map(ref => ({ - seriesName: ref.seriesName, - seriesDir: ref.seriesDir - })) - })) - } - })) - - throw new Error('Readme project series name conflict') - } - - await Promise.all(projectSeries.map(async series => { - const aindexProjectsDir = this.resolveAindexPath(series.dist, aindexDir) - if (!(fs.existsSync(aindexProjectsDir) && fs.statSync(aindexProjectsDir).isDirectory())) { - logger.debug('aindex project series directory does not exist', {path: aindexProjectsDir, series: series.name}) - return - } - - try { - const projectEntries = fs - .readdirSync(aindexProjectsDir, {withFileTypes: true}) - .filter(entry => entry.isDirectory()) - .sort((a, b) => a.name.localeCompare(b.name)) - - for (const projectEntry of projectEntries) { - const projectName = projectEntry.name - const projectDir = path.join(aindexProjectsDir, projectName) - - await this.collectReadmeFiles( - ctx, - projectDir, - projectName, - workspaceDir, - '', - readmePrompts, - globalScope - ) - } - } - catch (e) { - logger.error(buildFileOperationDiagnostic({ - code: 'README_PROJECT_SCAN_FAILED', - title: `Failed to scan aindex ${series.name} projects for readme prompts`, - operation: 'scan', - targetKind: `aindex ${series.name} project directory`, - path: aindexProjectsDir, - error: e - })) - } - })) - - readmePrompts.sort((a, b) => { - const projectDiff = a.projectName.localeCompare(b.projectName) - if (projectDiff !== 0) return projectDiff - - const targetDiff = a.targetDir.path.localeCompare(b.targetDir.path) - if (targetDiff !== 0) return targetDiff - - return a.fileKind.localeCompare(b.fileKind) - }) - - return {readmePrompts} - } - - private async collectReadmeFiles( - ctx: InputCapabilityContext, - currentDir: string, - projectName: string, - workspaceDir: string, - relativePath: string, - readmePrompts: ReadmePrompt[], - globalScope: InputCapabilityContext['globalScope'] - ): Promise { - const {fs, path, logger} = ctx - const isRoot = relativePath === '' - - for (const [fileKind, {src}] of ALL_FILE_KINDS) { - const filePath = path.join(currentDir, src) - if (!fs.existsSync(filePath) || !fs.statSync(filePath).isFile()) continue - - try { - let content: string - try { - const artifact = await readPromptArtifact(filePath, { - mode: 'dist', - globalScope - }) - const {content: compiledContent} = artifact - content = compiledContent - assertNoResidualModuleSyntax(content, filePath) - } - catch (e) { - if (e instanceof CompilerDiagnosticError) { - logger.error(buildPromptCompilerDiagnostic({ - code: 'README_PROMPT_COMPILE_FAILED', - title: 'Failed to compile readme-family prompt', - diagnosticText: formatPromptCompilerDiagnostic(e, { - operation: 'Failed to compile readme-family prompt.', - promptKind: 'readme-family', - logicalName: `${projectName}/${src}`, - distPath: filePath - }), - details: { - promptKind: 'readme-family', - distPath: filePath, - projectName, - fileKind - } - })) - if (e instanceof ScopeError) { - const globalConfigPath = getGlobalConfigPath() - logger.error(buildConfigDiagnostic({ - code: 'README_SCOPE_VARIABLES_MISSING', - title: 'Readme-family prompt references missing config variables', - reason: diagnosticLines( - `The readme-family prompt uses scope variables that are not defined in "${globalConfigPath}".` - ), - configPath: globalConfigPath, - exactFix: diagnosticLines( - `Define the missing variables in "${globalConfigPath}" and rerun tnmsc.` - ), - details: { - promptPath: filePath, - errorMessage: e.message - } - })) - } - process.exit(1) - } - throw e - } - - // Readme-family outputs intentionally land in /. - // Cross-series duplicate project names are rejected earlier to keep this - // workspace mapping deterministic and overwrite-free. - const targetPath = isRoot ? projectName : path.join(projectName, relativePath) - - const targetDir: RelativePath = { - pathKind: FilePathKind.Relative, - path: targetPath, - basePath: workspaceDir, - getDirectoryName: () => isRoot ? projectName : path.basename(relativePath), - getAbsolutePath: () => path.resolve(workspaceDir, targetPath) - } - - const dir: RelativePath = { - pathKind: FilePathKind.Relative, - path: path.dirname(filePath), - basePath: workspaceDir, - getDirectoryName: () => path.basename(path.dirname(filePath)), - getAbsolutePath: () => path.dirname(filePath) - } - - readmePrompts.push({ - type: PromptKind.Readme, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - projectName, - targetDir, - isRoot, - fileKind, - markdownContents: [], - dir - }) - } - catch (e) { - logger.warn(buildFileOperationDiagnostic({ - code: 'README_PROMPT_READ_FAILED', - title: 'Failed to read readme-family file', - operation: 'read', - targetKind: 'readme-family prompt file', - path: filePath, - error: e, - details: { - fileKind - } - })) - } - } - - try { - const entries = fs.readdirSync(currentDir, {withFileTypes: true}) - - for (const entry of entries) { - if (entry.isDirectory()) { - const subRelativePath = isRoot ? entry.name : path.join(relativePath, entry.name) - const subDir = path.join(currentDir, entry.name) - - await this.collectReadmeFiles(ctx, subDir, projectName, workspaceDir, subRelativePath, readmePrompts, globalScope) - } - } - } - catch (e) { - logger.warn(buildFileOperationDiagnostic({ - code: 'README_DIRECTORY_SCAN_FAILED', - title: 'Failed to scan readme-family directory', - operation: 'scan', - targetKind: 'readme-family directory', - path: currentDir, - error: e - })) - } - } -} diff --git a/cli/src/inputs/input-rule.test.ts b/cli/src/inputs/input-rule.test.ts deleted file mode 100644 index a91b2655..00000000 --- a/cli/src/inputs/input-rule.test.ts +++ /dev/null @@ -1,93 +0,0 @@ -import type {InputCapabilityContext} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger} from '../plugins/plugin-core' -import {RuleInputCapability} from './input-rule' - -function createContext(tempWorkspace: string): InputCapabilityContext { - const options = mergeConfig({workspaceDir: tempWorkspace}) - - return { - logger: createLogger('RuleInputCapabilityTest', 'error'), - fs, - path, - glob, - userConfigOptions: options, - dependencyContext: {} - } as InputCapabilityContext -} - -describe('rule input plugin', () => { - it('fails hard when source exists without a compiled dist pair', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-rule-src-test-')) - const aindexDir = path.join(tempWorkspace, 'aindex') - const srcDir = path.join(aindexDir, 'rules', 'qa') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.writeFileSync( - path.join(srcDir, 'boot.src.mdx'), - '---\ndescription: source only\nglobs:\n - "**/*.ts"\n---\nSource only rule', - 'utf8' - ) - - const plugin = new RuleInputCapability() - await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Missing compiled dist prompt') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('loads rules from dist when the source tree is missing', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-rule-dist-only-test-')) - const aindexDir = path.join(tempWorkspace, 'aindex') - const distDir = path.join(aindexDir, 'dist', 'rules', 'qa') - - try { - fs.mkdirSync(distDir, {recursive: true}) - fs.writeFileSync( - path.join(distDir, 'boot.mdx'), - '---\nscope: global\ndescription: Dist only rule\nglobs:\n - "**/*.ts"\n---\nDist only rule', - 'utf8' - ) - - const plugin = new RuleInputCapability() - const result = await plugin.collect(createContext(tempWorkspace)) - - expect(result.rules?.length ?? 0).toBe(1) - expect(result.rules?.[0]?.ruleName).toBe('boot') - expect(result.rules?.[0]?.content).toContain('Dist only rule') - expect(result.rules?.[0]?.scope).toBe('global') - expect(result.rules?.[0]?.globs).toEqual(['**/*.ts']) - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('rejects workspace as an unsupported rule scope', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-rule-workspace-scope-test-')) - const aindexDir = path.join(tempWorkspace, 'aindex') - const distDir = path.join(aindexDir, 'dist', 'rules', 'qa') - - try { - fs.mkdirSync(distDir, {recursive: true}) - fs.writeFileSync( - path.join(distDir, 'boot.mdx'), - '---\nscope: workspace\ndescription: Dist only rule\nglobs:\n - "**/*.ts"\n---\nDist only rule', - 'utf8' - ) - - const plugin = new RuleInputCapability() - await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Field "scope" must be "project" or "global"') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/input-rule.ts b/cli/src/inputs/input-rule.ts deleted file mode 100644 index e0810657..00000000 --- a/cli/src/inputs/input-rule.ts +++ /dev/null @@ -1,103 +0,0 @@ -import type { - InputCapabilityContext, - InputCollectedContext, - RulePrompt, - RuleScope, - RuleYAMLFrontMatter -} from '../plugins/plugin-core' -import {buildFileOperationDiagnostic} from '@/diagnostics' -import { - AbstractInputCapability, - createLocalizedPromptReader, - FilePathKind, - PromptKind, - SourceLocaleExtensions, - validateRuleMetadata -} from '../plugins/plugin-core' - -export class RuleInputCapability extends AbstractInputCapability { - constructor() { - super('RuleInputCapability') - } - - override async collect(ctx: InputCapabilityContext): Promise> { - const {userConfigOptions: options, logger, path, fs, globalScope} = ctx - const resolvedPaths = this.resolveBasePaths(options) - - const srcDir = this.resolveAindexPath(options.aindex.rules.src, resolvedPaths.aindexDir) - const distDir = this.resolveAindexPath(options.aindex.rules.dist, resolvedPaths.aindexDir) - - const reader = createLocalizedPromptReader(fs, path, logger, globalScope) - - const {prompts: localizedRulesFromSrc, errors} = await reader.readFlatFiles( - srcDir, - distDir, - { - kind: PromptKind.Rule, - localeExtensions: SourceLocaleExtensions, - hydrateSourceContents: false, - isDirectoryStructure: false, - createPrompt: async (content, _locale, name, metadata) => { - const yamlFrontMatter = metadata as RuleYAMLFrontMatter | undefined - const filePath = path.join(distDir, `${name}.mdx`) - if (yamlFrontMatter != null) { - const validation = validateRuleMetadata(yamlFrontMatter as Record, filePath) - if (!validation.valid) throw new Error(validation.errors.join('\n')) - } - const globs = yamlFrontMatter?.globs ?? [] - const scope: RuleScope = yamlFrontMatter?.scope ?? 'project' - const seriName = yamlFrontMatter?.seriName as string | undefined - const normalizedName = name.replaceAll('\\', '/') // Normalize path separator for cross-platform compatibility - const prefix = normalizedName.includes('/') ? normalizedName.split('/')[0] ?? '' : '' - const ruleName = normalizedName.split('/').pop() ?? normalizedName - - const rulePrompt = { - type: PromptKind.Rule, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: `${name}.mdx`, - basePath: distDir, - getDirectoryName: () => ruleName, - getAbsolutePath: () => filePath - }, - prefix, - ruleName, - globs, - scope, - markdownContents: [] - } as RulePrompt - - if (yamlFrontMatter != null) Object.assign(rulePrompt, {yamlFrontMatter}) - if (seriName != null) Object.assign(rulePrompt, {seriName}) - - return rulePrompt - } - } - ) - - for (const error of errors) { - logger.warn(buildFileOperationDiagnostic({ - code: 'RULE_PROMPT_READ_FAILED', - title: 'Failed to read rule prompt', - operation: error.phase === 'scan' ? 'scan' : 'read', - targetKind: 'rule prompt', - path: error.path, - error: error.error, - details: { - phase: error.phase - } - })) - } - - if (errors.length > 0) throw new Error(errors.map(error => error.error instanceof Error ? error.error.message : String(error.error)).join('\n')) - - return { - rules: localizedRulesFromSrc - .map(r => r.dist?.prompt) - .filter((rule): rule is RulePrompt => rule != null) - } - } -} diff --git a/cli/src/inputs/input-shared-ignore.ts b/cli/src/inputs/input-shared-ignore.ts deleted file mode 100644 index 0ed59d67..00000000 --- a/cli/src/inputs/input-shared-ignore.ts +++ /dev/null @@ -1,35 +0,0 @@ -import type {AIAgentIgnoreConfigFile, InputCapabilityContext, InputCollectedContext} from '../plugins/plugin-core' -import {AbstractInputCapability} from '../plugins/plugin-core' -import {AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS, resolvePublicDefinitionPath} from '../public-config-paths' - -export class AIAgentIgnoreInputCapability extends AbstractInputCapability { - constructor() { - super('AIAgentIgnoreInputCapability') - } - - collect(ctx: InputCapabilityContext): Partial { - const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) - const results: AIAgentIgnoreConfigFile[] = [] - - for (const fileName of AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS) { - const filePath = resolvePublicDefinitionPath(aindexDir, fileName, { - command: ctx.runtimeCommand, - workspaceDir - }) - if (!ctx.fs.existsSync(filePath)) { - this.log.debug({action: 'collect', message: 'Ignore file not found', path: filePath}) - continue - } - const content = ctx.fs.readFileSync(filePath, 'utf8') - if (content.length === 0) { - this.log.debug({action: 'collect', message: 'Ignore file is empty', path: filePath}) - continue - } - results.push({fileName, content, sourcePath: filePath}) - this.log.debug({action: 'collect', message: 'Loaded ignore file', path: filePath, fileName}) - } - - if (results.length === 0) return {} - return {aiAgentIgnoreConfigFiles: results} - } -} diff --git a/cli/src/inputs/input-subagent.test.ts b/cli/src/inputs/input-subagent.test.ts deleted file mode 100644 index 6567c128..00000000 --- a/cli/src/inputs/input-subagent.test.ts +++ /dev/null @@ -1,224 +0,0 @@ -import type {InputCapabilityContext} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger} from '../plugins/plugin-core' -import {SubAgentInputCapability} from './input-subagent' - -function createContext(tempWorkspace: string): InputCapabilityContext { - const options = mergeConfig({workspaceDir: tempWorkspace}) - - return { - logger: createLogger('SubAgentInputCapabilityTest', 'error'), - fs, - path, - glob, - userConfigOptions: options, - dependencyContext: {} - } as InputCapabilityContext -} - -describe('subagent input plugin', () => { - it('prefers dist content, and dist also runs mdx-to-md compilation', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-test-')) - const aindexDir = path.join(tempWorkspace, 'aindex') - const srcDir = path.join(aindexDir, 'subagents') - const distDir = path.join(aindexDir, 'dist', 'subagents') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.mkdirSync(distDir, {recursive: true}) - - const srcFile = path.join(srcDir, 'demo.src.mdx') - const distFile = path.join(distDir, 'demo.mdx') - fs.writeFileSync(srcFile, '---\ndescription: src\n---\nSubAgent source', 'utf8') - fs.writeFileSync(distFile, '---\ndescription: dist\n---\nexport const x = 1\n\nSubAgent dist', 'utf8') - - const plugin = new SubAgentInputCapability() - const result = await plugin.collect(createContext(tempWorkspace)) - - expect(result.subAgents?.length ?? 0).toBe(1) - expect(result.subAgents?.[0]?.agentName).toBe('demo') - expect(result.subAgents?.[0]?.canonicalName).toBe('demo') - expect(result.subAgents?.[0]?.content).toContain('SubAgent dist') - expect(result.subAgents?.[0]?.content).not.toContain('SubAgent source') - expect(result.subAgents?.[0]?.content).not.toContain('export const x = 1') - expect(result.subAgents?.[0]?.yamlFrontMatter?.description).toBe('dist') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('extracts directory name as subagent prefix', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-prefix-test-')) - const aindexDir = path.join(tempWorkspace, 'aindex') - const srcDir = path.join(aindexDir, 'subagents', 'qa') - const distDir = path.join(aindexDir, 'dist', 'subagents', 'qa') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.mkdirSync(distDir, {recursive: true}) - - const srcFile = path.join(srcDir, 'boot.src.mdx') - const distFile = path.join(distDir, 'boot.mdx') - fs.writeFileSync(srcFile, '---\ndescription: qa boot src\n---\nSubAgent source', 'utf8') - fs.writeFileSync(distFile, '---\ndescription: qa boot dist\n---\nSubAgent dist', 'utf8') - - const plugin = new SubAgentInputCapability() - const result = await plugin.collect(createContext(tempWorkspace)) - const [subAgent] = result.subAgents ?? [] - - expect(result.subAgents?.length ?? 0).toBe(1) - expect(subAgent?.agentPrefix).toBe('qa') - expect(subAgent?.agentName).toBe('boot') - expect(subAgent?.canonicalName).toBe('qa-boot') - expect(subAgent?.content).toContain('SubAgent dist') - expect(subAgent?.content).not.toContain('SubAgent source') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('keeps rawMdxContent from dist for output-side recompilation', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-rawmdx-test-')) - const aindexDir = path.join(tempWorkspace, 'aindex') - const srcDir = path.join(aindexDir, 'subagents') - const distDir = path.join(aindexDir, 'dist', 'subagents') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.mkdirSync(distDir, {recursive: true}) - - const srcFile = path.join(srcDir, 'demo.src.mdx') - const distFile = path.join(distDir, 'demo.mdx') - fs.writeFileSync(srcFile, '---\ndescription: src\n---\nSubAgent source', 'utf8') - fs.writeFileSync(distFile, '---\ndescription: dist\n---\nexport const x = 1\n\nSubAgent dist', 'utf8') - - const plugin = new SubAgentInputCapability() - const result = await plugin.collect(createContext(tempWorkspace)) - const [subAgent] = result.subAgents ?? [] - - expect(subAgent?.rawMdxContent).toContain('export const x = 1') - expect(subAgent?.content).toContain('SubAgent dist') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('loads subagents from dist when the source tree is missing', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-dist-only-test-')) - const aindexDir = path.join(tempWorkspace, 'aindex') - const distDir = path.join(aindexDir, 'dist', 'subagents') - - try { - fs.mkdirSync(distDir, {recursive: true}) - fs.writeFileSync( - path.join(distDir, 'demo.mdx'), - '---\ndescription: dist only\n---\nDist only subagent', - 'utf8' - ) - - const plugin = new SubAgentInputCapability() - const result = await plugin.collect(createContext(tempWorkspace)) - - expect(result.subAgents?.length ?? 0).toBe(1) - expect(result.subAgents?.[0]?.agentName).toBe('demo') - expect(result.subAgents?.[0]?.canonicalName).toBe('demo') - expect(result.subAgents?.[0]?.content).toContain('Dist only subagent') - expect(result.subAgents?.[0]?.yamlFrontMatter?.description).toBe('dist only') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('fails hard when source exists without a compiled dist pair', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-source-only-test-')) - const srcDir = path.join(tempWorkspace, 'aindex', 'subagents') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.writeFileSync( - path.join(srcDir, 'demo.src.mdx'), - '---\ndescription: source only\n---\nSource only subagent', - 'utf8' - ) - - const plugin = new SubAgentInputCapability() - await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Missing compiled dist prompt') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('rejects workspace as an unsupported subagent scope', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-workspace-scope-test-')) - const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'subagents') - - try { - fs.mkdirSync(distDir, {recursive: true}) - fs.writeFileSync( - path.join(distDir, 'demo.mdx'), - '---\ndescription: dist only\nscope: workspace\n---\nDist only subagent', - 'utf8' - ) - - const plugin = new SubAgentInputCapability() - await expect(plugin.collect(createContext(tempWorkspace))).rejects.toThrow('Field "scope" must be "project" or "global"') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) - - it('warns and ignores authored subagent names', async () => { - const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-name-warning-test-')) - const warnings: string[] = [] - const aindexDir = path.join(tempWorkspace, 'aindex') - const srcDir = path.join(aindexDir, 'subagents', 'qa') - const distDir = path.join(aindexDir, 'dist', 'subagents', 'qa') - - try { - fs.mkdirSync(srcDir, {recursive: true}) - fs.mkdirSync(distDir, {recursive: true}) - - fs.writeFileSync(path.join(srcDir, 'boot.src.mdx'), '---\nname: review-helper\ndescription: src\n---\nSubAgent source', 'utf8') - fs.writeFileSync(path.join(distDir, 'boot.mdx'), '---\nname: review-helper\ndescription: dist\n---\nSubAgent dist', 'utf8') - - const logger = { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: diagnostic => warnings.push(diagnostic.code), - error: () => {}, - fatal: () => {} - } - - const options = mergeConfig({workspaceDir: tempWorkspace}) - const plugin = new SubAgentInputCapability() - const result = await plugin.collect({ - logger, - fs, - path, - glob, - userConfigOptions: options, - dependencyContext: {} - } as InputCapabilityContext) - - const [subAgent] = result.subAgents ?? [] - expect(subAgent?.canonicalName).toBe('qa-boot') - expect('name' in (subAgent?.yamlFrontMatter ?? {})).toBe(false) - expect(warnings).toContain('SUBAGENT_NAME_IGNORED') - } - finally { - fs.rmSync(tempWorkspace, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/inputs/input-subagent.ts b/cli/src/inputs/input-subagent.ts deleted file mode 100644 index ebbc0b06..00000000 --- a/cli/src/inputs/input-subagent.ts +++ /dev/null @@ -1,179 +0,0 @@ -import type { - InputCapabilityContext, - InputCollectedContext, - Locale, - SubAgentPrompt, - SubAgentYAMLFrontMatter -} from '../plugins/plugin-core' -import {buildConfigDiagnostic, buildFileOperationDiagnostic, diagnosticLines} from '@/diagnostics' -import { - AbstractInputCapability, - createLocalizedPromptReader, - deriveSubAgentIdentity, - FilePathKind, - PromptKind, - SourceLocaleExtensions, - validateSubAgentMetadata - -} from '../plugins/plugin-core' - -export class SubAgentInputCapability extends AbstractInputCapability { - constructor() { - super('SubAgentInputCapability') - } - - private createSubAgentPrompt( - content: string, - _locale: Locale, - name: string, - srcDir: string, - distDir: string, - ctx: InputCapabilityContext, - metadata?: Record, - warnedDerivedNames?: Set - ): SubAgentPrompt { - const {fs, logger, path} = ctx - const {agentPrefix, agentName, canonicalName} = deriveSubAgentIdentity(name) - - const filePath = path.join(distDir, `${name}.mdx`) - const entryName = `${name}.mdx` - const sourceFilePath = fs.existsSync(path.join(srcDir, `${name}.src.mdx`)) - ? path.join(srcDir, `${name}.src.mdx`) - : filePath - const yamlFrontMatter = metadata == null - ? void 0 - : (() => { - const frontMatter = {...metadata} - const authoredName = frontMatter['name'] - - if (typeof authoredName === 'string' && authoredName.trim().length > 0 && warnedDerivedNames?.has(sourceFilePath) !== true) { - warnedDerivedNames?.add(sourceFilePath) - logger.warn(buildConfigDiagnostic({ - code: 'SUBAGENT_NAME_IGNORED', - title: 'Sub-agent authored name is ignored', - reason: diagnosticLines( - `tnmsc ignores the authored sub-agent name "${authoredName}" in favor of the derived path name "${canonicalName}".` - ), - configPath: sourceFilePath, - exactFix: diagnosticLines( - 'Remove the `name` field from the sub-agent front matter or exported metadata.', - 'Rename the sub-agent directory or file if you need a different sub-agent name.' - ), - details: { - authoredName, - derivedName: canonicalName, - logicalName: name - } - })) - } - - delete frontMatter['name'] - return frontMatter as SubAgentYAMLFrontMatter - })() - - const prompt: SubAgentPrompt = { - type: PromptKind.SubAgent, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: entryName, - basePath: distDir, - getDirectoryName: () => entryName.replace(/\.mdx$/, ''), - getAbsolutePath: () => filePath - }, - ...agentPrefix != null && {agentPrefix}, - agentName, - canonicalName - } as SubAgentPrompt - - if (yamlFrontMatter == null) return prompt - - const validation = validateSubAgentMetadata(yamlFrontMatter as Record, filePath) - if (!validation.valid) throw new Error(validation.errors.join('\n')) - - Object.assign(prompt, {yamlFrontMatter}) - if (yamlFrontMatter.seriName != null) Object.assign(prompt, {seriName: yamlFrontMatter.seriName}) - return prompt - } - - override async collect(ctx: InputCapabilityContext): Promise> { - const {userConfigOptions: options, logger, path, fs, globalScope} = ctx - const resolvedPaths = this.resolveBasePaths(options) - - const srcDir = this.resolveAindexPath(options.aindex.subAgents.src, resolvedPaths.aindexDir) - const distDir = this.resolveAindexPath(options.aindex.subAgents.dist, resolvedPaths.aindexDir) - - logger.debug('SubAgentInputCapability collecting', { - srcDir, - distDir, - aindexDir: resolvedPaths.aindexDir - }) - - const reader = createLocalizedPromptReader(fs, path, logger, globalScope) - const warnedDerivedNames = new Set() - - const {prompts: localizedSubAgents, errors} = await reader.readFlatFiles( - srcDir, - distDir, - { - kind: PromptKind.SubAgent, - localeExtensions: SourceLocaleExtensions, - hydrateSourceContents: false, - isDirectoryStructure: false, - createPrompt: (content, locale, name, metadata) => this.createSubAgentPrompt( - content, - locale, - name, - srcDir, - distDir, - ctx, - metadata, - warnedDerivedNames - ) - } - ) - - logger.debug('SubAgentInputCapability read complete', { - subAgentCount: localizedSubAgents.length, - errorCount: errors.length - }) - - for (const error of errors) { - logger.warn(buildFileOperationDiagnostic({ - code: 'SUBAGENT_PROMPT_READ_FAILED', - title: 'Failed to read sub-agent prompt', - operation: error.phase === 'scan' ? 'scan' : 'read', - targetKind: 'sub-agent prompt', - path: error.path, - error: error.error, - details: { - phase: error.phase - } - })) - } - - if (errors.length > 0) throw new Error(errors.map(error => error.error instanceof Error ? error.error.message : String(error.error)).join('\n')) - - const flatSubAgents: SubAgentPrompt[] = [] - for (const localized of localizedSubAgents) { - const distContent = localized.dist - if (distContent?.prompt == null) continue - - const {prompt: distPrompt, rawMdx} = distContent - flatSubAgents.push(rawMdx == null - ? distPrompt - : {...distPrompt, rawMdxContent: rawMdx}) - } - - logger.debug('SubAgentInputCapability flattened subAgents', { - count: flatSubAgents.length, - agents: flatSubAgents.map(a => a.canonicalName) - }) - - return { - subAgents: flatSubAgents - } - } -} diff --git a/cli/src/inputs/input-vscode-config.ts b/cli/src/inputs/input-vscode-config.ts deleted file mode 100644 index 5476237e..00000000 --- a/cli/src/inputs/input-vscode-config.ts +++ /dev/null @@ -1,27 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext, ProjectIDEConfigFile} from '../plugins/plugin-core' -import {AbstractInputCapability, IDEKind} from '../plugins/plugin-core' -import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' - -export class VSCodeConfigInputCapability extends AbstractInputCapability { - constructor() { - super('VSCodeConfigInputCapability') - } - - collect(ctx: InputCapabilityContext): Partial { - const {userConfigOptions, fs} = ctx - const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) - - const files = ['.vscode/settings.json', '.vscode/extensions.json'] - const vscodeConfigFiles: ProjectIDEConfigFile[] = [] - - for (const relativePath of files) { - const file = readPublicIdeConfigDefinitionFile(IDEKind.VSCode, relativePath, aindexDir, fs, { - command: ctx.runtimeCommand, - workspaceDir - }) - if (file != null) vscodeConfigFiles.push(file) - } - - return {vscodeConfigFiles} - } -} diff --git a/cli/src/inputs/input-workspace.ts b/cli/src/inputs/input-workspace.ts deleted file mode 100644 index dfc10863..00000000 --- a/cli/src/inputs/input-workspace.ts +++ /dev/null @@ -1,28 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext, Workspace} from '../plugins/plugin-core' -import * as path from 'node:path' -import {AbstractInputCapability, FilePathKind} from '../plugins/plugin-core' - -export class WorkspaceInputCapability extends AbstractInputCapability { - constructor() { - super('WorkspaceInputCapability') - } - - collect(ctx: InputCapabilityContext): Partial { - const {userConfigOptions: options} = ctx - const {workspaceDir, aindexDir} = this.resolveBasePaths(options) - - const workspace: Workspace = { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir) - }, - projects: [] - } - - return { - workspace, - aindexDir - } - } -} diff --git a/cli/src/inputs/input-zed-config.ts b/cli/src/inputs/input-zed-config.ts deleted file mode 100644 index 32642374..00000000 --- a/cli/src/inputs/input-zed-config.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type {InputCapabilityContext, InputCollectedContext, ProjectIDEConfigFile} from '../plugins/plugin-core' -import {AbstractInputCapability, IDEKind} from '../plugins/plugin-core' -import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' - -export class ZedConfigInputCapability extends AbstractInputCapability { - constructor() { - super('ZedConfigInputCapability') - } - - collect(ctx: InputCapabilityContext): Partial { - const {userConfigOptions, fs} = ctx - const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) - - const zedConfigFiles: ProjectIDEConfigFile[] = [] - const file = readPublicIdeConfigDefinitionFile(IDEKind.Zed, '.zed/settings.json', aindexDir, fs, { - command: ctx.runtimeCommand, - workspaceDir - }) - if (file != null) zedConfigFiles.push(file) - - return {zedConfigFiles} - } -} diff --git a/cli/src/inputs/runtime.ts b/cli/src/inputs/runtime.ts deleted file mode 100644 index 710fe2f8..00000000 --- a/cli/src/inputs/runtime.ts +++ /dev/null @@ -1,172 +0,0 @@ -import type {MdxGlobalScope} from '@truenine/md-compiler/globals' -import type { - InputCapability, - InputCapabilityContext, - InputCollectedContext, - PluginOptions, - UserConfigFile -} from '@/plugins/plugin-core' - -import * as fs from 'node:fs' -import * as path from 'node:path' -import {createLogger} from '@truenine/logger' -import glob from 'fast-glob' -import { - AIAgentIgnoreInputCapability, - AindexInputCapability, - CommandInputCapability, - EditorConfigInputCapability, - GitExcludeInputCapability, - GitIgnoreInputCapability, - GlobalMemoryInputCapability, - JetBrainsConfigInputCapability, - MarkdownWhitespaceCleanupEffectInputCapability, - OrphanFileCleanupEffectInputCapability, - ProjectPromptInputCapability, - ReadmeMdInputCapability, - RuleInputCapability, - SkillDistCleanupEffectInputCapability, - SkillInputCapability, - SubAgentInputCapability, - VSCodeConfigInputCapability, - WorkspaceInputCapability, - ZedConfigInputCapability -} from '@/inputs' -import {extractUserArgs, parseArgs} from '@/pipeline/CliArgumentParser' -import {buildDependencyContext, mergeContexts} from '@/pipeline/ContextMerger' -import {topologicalSort} from '@/pipeline/DependencyResolver' -import {GlobalScopeCollector, ScopePriority, ScopeRegistry} from '@/plugins/plugin-core/GlobalScopeCollector' - -export interface InputRuntimeOptions { - readonly pipelineArgs?: readonly string[] - readonly userConfigOptions: Required - readonly userConfig?: UserConfigFile - readonly capabilities?: readonly InputCapability[] - readonly includeBuiltinEffects?: boolean -} - -function createBuiltinInputEffectCapabilities(): InputCapability[] { - return [ - new SkillDistCleanupEffectInputCapability(), - new OrphanFileCleanupEffectInputCapability(), - new MarkdownWhitespaceCleanupEffectInputCapability() - ] -} - -function createBuiltinInputReaderCapabilities(): InputCapability[] { - return [ - new WorkspaceInputCapability(), - new AindexInputCapability(), - new VSCodeConfigInputCapability(), - new ZedConfigInputCapability(), - new JetBrainsConfigInputCapability(), - new EditorConfigInputCapability(), - new SkillInputCapability(), - new CommandInputCapability(), - new SubAgentInputCapability(), - new RuleInputCapability(), - new GlobalMemoryInputCapability(), - new ProjectPromptInputCapability(), - new ReadmeMdInputCapability(), - new GitIgnoreInputCapability(), - new GitExcludeInputCapability(), - new AIAgentIgnoreInputCapability() - ] -} - -export function resolveRuntimeCommand( - pipelineArgs?: readonly string[] -): InputCapabilityContext['runtimeCommand'] { - if (pipelineArgs == null || pipelineArgs.length === 0) return 'execute' - - const filteredArgs = pipelineArgs.filter((arg): arg is string => arg != null) - const userArgs = extractUserArgs(filteredArgs) - const args = parseArgs(userArgs) - - if (args.helpFlag || args.versionFlag || args.unknownCommand != null) return void 0 - if (args.subcommand === 'clean') return 'clean' - if (args.subcommand === 'plugins') return 'plugins' - if (args.subcommand === 'dry-run' || args.dryRun) return 'dry-run' - if (args.subcommand == null) return 'execute' - return void 0 -} - -export async function collectInputContext( - options: InputRuntimeOptions -): Promise> { - const { - pipelineArgs, - userConfigOptions, - userConfig, - capabilities, - includeBuiltinEffects = true - } = options - const logger = createLogger('InputRuntime', userConfigOptions.logLevel) - const runtimeCommand = resolveRuntimeCommand(pipelineArgs) - const baseCtx: Omit = { - logger, - userConfigOptions, - fs, - path, - glob - } - - const resolvedCapabilities = topologicalSort([ - ...includeBuiltinEffects ? createBuiltinInputEffectCapabilities() : [], - ...capabilities ?? createBuiltinInputReaderCapabilities() - ]) - const globalScopeCollector = new GlobalScopeCollector({userConfig}) - const globalScope: MdxGlobalScope = globalScopeCollector.collect() - const scopeRegistry = new ScopeRegistry() - scopeRegistry.setGlobalScope(globalScope) - - logger.debug('global scope collected', { - osInfo: { - platform: globalScope.os.platform, - arch: globalScope.os.arch, - shellKind: globalScope.os.shellKind - }, - hasProfile: Object.keys(globalScope.profile).length > 0, - hasTool: Object.keys(globalScope.tool).length > 0 - }) - - const outputsByCapability = new Map>() - let accumulatedContext: Partial = {} - - for (const capability of resolvedCapabilities) { - const dependencyContext = buildDependencyContext(capability, outputsByCapability, mergeContexts) - const ctx: InputCapabilityContext = { - ...baseCtx, - dependencyContext, - ...runtimeCommand != null ? {runtimeCommand} : {}, - globalScope, - scopeRegistry - } - - const capabilityWithEffects = capability as InputCapability & { - executeEffects?: (ctx: InputCapabilityContext, dryRun: boolean) => Promise - } - if (capabilityWithEffects.executeEffects != null) await capabilityWithEffects.executeEffects(ctx, false) - - const output = await capability.collect(ctx) - outputsByCapability.set(capability.name, output) - accumulatedContext = mergeContexts(accumulatedContext, output) - - const capabilityWithScopes = capability as InputCapability & { - getRegisteredScopes?: () => readonly {namespace: string, values: Record}[] - } - if (capabilityWithScopes.getRegisteredScopes != null) { - const registeredScopes = capabilityWithScopes.getRegisteredScopes() - for (const {namespace, values} of registeredScopes) { - scopeRegistry.register(namespace, values, ScopePriority.PluginRegistered) - logger.debug('input capability scope registered', { - capability: capability.name, - namespace, - keys: Object.keys(values) - }) - } - } - } - - return accumulatedContext -} diff --git a/cli/src/lib.rs b/cli/src/lib.rs deleted file mode 100644 index 01062b49..00000000 --- a/cli/src/lib.rs +++ /dev/null @@ -1,546 +0,0 @@ -//! tnmsc library — exposes core functionality for GUI backend direct invocation. -//! -//! Pure Rust commands: version, load_config, config_show -//! Bridge commands (Node.js): run_bridge_command - -pub mod bridge; -pub mod commands; -pub mod core; -pub(crate) mod diagnostic_helpers; - -use std::path::Path; - -use serde::{Deserialize, Serialize}; - -/// Unified error type for CLI library API. -#[derive(Debug, thiserror::Error)] -pub enum CliError { - #[error("Node.js not found in PATH")] - NodeNotFound, - - #[error("Plugin runtime not found: {0}")] - PluginRuntimeNotFound(String), - - #[error("Node.js process failed with exit code {code}: {stderr}")] - NodeProcessFailed { code: i32, stderr: String }, - - #[error("Config error: {0}")] - ConfigError(String), - - #[error("IO error: {0}")] - IoError(#[from] std::io::Error), - - #[error("Serialization error: {0}")] - SerializationError(#[from] serde_json::Error), -} - -/// Captured output from a bridge command (execute, dry-run, clean, plugins). -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct BridgeCommandResult { - pub stdout: String, - pub stderr: String, - pub exit_code: i32, -} - -// --------------------------------------------------------------------------- -// Public API functions -// --------------------------------------------------------------------------- - -/// Return the CLI crate version string. -pub fn version() -> &'static str { - env!("CARGO_PKG_VERSION") -} - -/// Load and merge configuration from the canonical global config path. -pub fn load_config(cwd: &Path) -> Result { - core::config::ConfigLoader::with_defaults() - .try_load(cwd) - .map_err(CliError::ConfigError) -} - -/// Return the merged global configuration as a pretty-printed JSON string. -pub fn config_show(cwd: &Path) -> Result { - let result = core::config::ConfigLoader::with_defaults() - .try_load(cwd) - .map_err(CliError::ConfigError)?; - serde_json::to_string_pretty(&result.config).map_err(CliError::from) -} - -/// Execute a bridge command (execute, dry-run, clean, plugins) via Node.js subprocess. -/// -/// The subprocess output is captured (piped) and returned as a [`BridgeCommandResult`]. -pub fn run_bridge_command( - subcommand: &str, - cwd: &Path, - json_mode: bool, - extra_args: &[&str], -) -> Result { - bridge::node::run_node_command_captured(subcommand, cwd, json_mode, extra_args) -} - -// --------------------------------------------------------------------------- -// Property-based tests — Property 1: Library API returns typed results -// --------------------------------------------------------------------------- -#[cfg(test)] -mod property_tests { - use super::*; - use proptest::prelude::*; - use tempfile::TempDir; - - /// **Validates: Requirements 1.4, 1.5** - /// **Feature: gui-direct-cli-crate, Property 1: Library API returns typed results** - - // ---- version() ---- - - #[test] - fn version_returns_cargo_pkg_version() { - let v = version(); - assert!(!v.is_empty(), "version() must return a non-empty string"); - assert_eq!(v, env!("CARGO_PKG_VERSION")); - } - - proptest! { - /// version() always returns a non-empty &'static str that matches CARGO_PKG_VERSION, - /// regardless of how many times it is called. - #[test] - fn prop_version_always_non_empty(_seed in 0u64..10000) { - let v = version(); - prop_assert!(!v.is_empty(), "version() returned empty string"); - prop_assert_eq!(v, env!("CARGO_PKG_VERSION")); - } - - // ---- load_config(cwd) ---- - - /// For any temporary directory, load_config returns Ok(MergedConfigResult) - /// because ConfigLoader has defaults and doesn't fail on missing config files. - #[test] - fn prop_load_config_returns_ok_for_any_tempdir(_seed in 0u64..100) { - let tmp = TempDir::new().expect("failed to create tempdir"); - let result = load_config(tmp.path()); - prop_assert!(result.is_ok(), "load_config should return Ok for any valid dir, got: {:?}", result.err()); - let merged = result.unwrap(); - prop_assert!(merged.sources.is_empty() || !merged.sources.is_empty(), - "sources should be a valid Vec"); - } - - // ---- config_show(cwd) ---- - - /// For any temporary directory, config_show returns Ok(String) containing valid JSON. - #[test] - fn prop_config_show_returns_valid_json(_seed in 0u64..100) { - let tmp = TempDir::new().expect("failed to create tempdir"); - let result = config_show(tmp.path()); - prop_assert!(result.is_ok(), "config_show should return Ok, got: {:?}", result.err()); - let json_str = result.unwrap(); - let parsed: Result = serde_json::from_str(&json_str); - prop_assert!(parsed.is_ok(), "config_show output should be valid JSON, got: {}", json_str); - } - - // ---- BridgeCommandResult structural property ---- - - /// BridgeCommandResult fields are typed and accessible for any combination of - /// stdout/stderr/exit_code values. Verifies Property 1 for the result struct - /// without spawning any processes. - /// - /// **Feature: gui-direct-cli-crate, Property 1: Library API returns typed results** - #[test] - fn prop_bridge_command_result_fields_are_typed( - stdout in ".*", - stderr in ".*", - exit_code in proptest::num::i32::ANY, - ) { - let bcr = BridgeCommandResult { - stdout: stdout.clone(), - stderr: stderr.clone(), - exit_code, - }; - // Typed field access — verifies the struct is not a raw string wrapper - let s: &str = &bcr.stdout; - let e: &str = &bcr.stderr; - let c: i32 = bcr.exit_code; - prop_assert_eq!(s, stdout.as_str()); - prop_assert_eq!(e, stderr.as_str()); - prop_assert_eq!(c, exit_code); - // Verify round-trip JSON serialization (camelCase fields per serde rename_all) - let json = serde_json::to_string(&bcr).expect("BridgeCommandResult must serialize"); - prop_assert!(json.contains("\"stdout\""), "JSON must contain stdout field"); - prop_assert!(json.contains("\"stderr\""), "JSON must contain stderr field"); - prop_assert!(json.contains("\"exitCode\""), "JSON must contain exitCode field (camelCase)"); - // Verify round-trip deserialization - let bcr2: BridgeCommandResult = - serde_json::from_str(&json).expect("BridgeCommandResult must deserialize"); - prop_assert_eq!(bcr2.stdout.as_str(), stdout.as_str()); - prop_assert_eq!(bcr2.stderr.as_str(), stderr.as_str()); - prop_assert_eq!(bcr2.exit_code, exit_code); - } - } - - // ---- CliError pattern matching exhaustiveness ---- - - #[test] - fn cli_error_variants_are_matchable() { - let errors: Vec = vec![ - CliError::NodeNotFound, - CliError::PluginRuntimeNotFound("test".into()), - CliError::NodeProcessFailed { - code: 1, - stderr: "fail".into(), - }, - CliError::ConfigError("bad config".into()), - CliError::IoError(std::io::Error::new(std::io::ErrorKind::NotFound, "test")), - CliError::SerializationError(serde_json::from_str::("invalid").unwrap_err()), - ]; - - for err in &errors { - match err { - CliError::NodeNotFound => assert!(err.to_string().contains("Node.js")), - CliError::PluginRuntimeNotFound(msg) => assert!(!msg.is_empty()), - CliError::NodeProcessFailed { code, stderr } => { - assert_eq!(*code, 1); - assert!(!stderr.is_empty()); - } - CliError::ConfigError(msg) => assert!(!msg.is_empty()), - CliError::IoError(e) => assert!(!e.to_string().is_empty()), - CliError::SerializationError(e) => assert!(!e.to_string().is_empty()), - } - } - } - - /// Single environment probe: verifies run_bridge_command returns a typed Result. - /// Runs once (not in proptest) to avoid spawning Node.js hundreds of times. - /// If Node.js is not found, returns NodeNotFound. - /// If plugin-runtime.mjs is not found, returns PluginRuntimeNotFound. - /// Both are typed CliError variants — no panics, no raw strings. - /// - /// **Feature: gui-direct-cli-crate, Property 1: Library API returns typed results** - #[test] - fn run_bridge_command_returns_typed_result_or_typed_error() { - // Only probe the environment — do not spawn a real subcommand that may hang. - // We check find_node/find_plugin_runtime directly to verify the typed error path. - let node_available = bridge::node::find_node().is_some(); - let runtime_available = bridge::node::find_plugin_runtime().is_some(); - - if !node_available { - // Verify NodeNotFound is returned as a typed error - let tmp = tempfile::TempDir::new().unwrap(); - let result = run_bridge_command("version", tmp.path(), false, &[]); - assert!( - matches!(result, Err(CliError::NodeNotFound)), - "expected NodeNotFound when node is absent, got: {:?}", - result - ); - } else if !runtime_available { - // Verify PluginRuntimeNotFound is returned as a typed error - let tmp = tempfile::TempDir::new().unwrap(); - let result = run_bridge_command("version", tmp.path(), false, &[]); - assert!( - matches!(result, Err(CliError::PluginRuntimeNotFound(_))), - "expected PluginRuntimeNotFound when runtime is absent, got: {:?}", - result - ); - } else { - // Both available — verify the function signature compiles and returns Result - // We do NOT actually spawn a process here to avoid hanging on unknown subcommands. - // The typed return type is verified at compile time. - let _: fn(&str, &Path, bool, &[&str]) -> Result = - run_bridge_command; - } - } -} - -// --------------------------------------------------------------------------- -// Property-based tests — Property 3: Bridge command respects working directory -// --------------------------------------------------------------------------- -#[cfg(test)] -mod property_tests_cwd { - use super::*; - use proptest::prelude::*; - use tempfile::TempDir; - - // Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory - // Validates: Requirement 5.5 - // - // Property: For any valid filesystem path passed as `cwd` to `run_bridge_command`, - // the Node.js subprocess's working directory is set to that path. - // - // Testing strategy: - // - Create a real temporary directory (guarantees the path exists on disk). - // - Call `run_bridge_command` with that directory as `cwd`. - // - The key invariant: the error returned (if any) must be about Node.js or the - // plugin runtime being unavailable — NOT an IoError about the cwd being invalid. - // - An IoError whose kind is NotFound/PermissionDenied on the cwd itself would - // indicate the path was silently ignored or incorrectly passed to `current_dir`. - // - If Node.js IS available and the runtime IS found, the process runs in the - // given directory (verified by the absence of any cwd-related IoError). - - /// Helper: determine whether an error is a cwd-related IoError. - /// - /// `std::process::Command::current_dir` fails at spawn time with an IoError - /// when the directory does not exist or is not accessible. We distinguish - /// this from the expected "Node.js not found" / "runtime not found" errors. - fn is_cwd_io_error(err: &CliError) -> bool { - match err { - CliError::IoError(io_err) => { - // An IoError caused by a bad cwd typically surfaces as NotFound or - // PermissionDenied at the OS level when spawning the child process. - // We conservatively flag *any* IoError as a potential cwd problem - // so the test catches regressions where cwd is not forwarded. - matches!( - io_err.kind(), - std::io::ErrorKind::NotFound | std::io::ErrorKind::PermissionDenied - ) - } - _ => false, - } - } - - /// Probe the environment once so proptest iterations can skip actual spawning - /// when both Node.js and the plugin runtime are present (to avoid hanging). - fn node_available() -> bool { - bridge::node::find_node().is_some() - } - - fn runtime_available() -> bool { - bridge::node::find_plugin_runtime().is_some() - } - - proptest! { - // Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory - // Validates: Requirement 5.5 - // - // For any real temporary directory, calling run_bridge_command with that directory - // as `cwd` must NOT produce a cwd-related IoError. The only acceptable errors are - // NodeNotFound or PluginRuntimeNotFound — both indicate the cwd was accepted and - // forwarded correctly to the subprocess builder; the failure is about runtime - // availability, not about the working directory itself. - // - // When both Node.js and the plugin runtime are present the test verifies the - // property structurally (via source inspection) rather than by actually spawning - // a long-running process, to keep the test suite fast and deterministic. - #[test] - fn prop_bridge_command_cwd_is_forwarded_not_ignored(_seed in 0u64..100u64) { - // Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory - // Validates: Requirement 5.5 - let tmp = TempDir::new().expect("failed to create temp dir"); - let cwd = tmp.path(); - - // The directory must exist before we pass it to run_bridge_command. - prop_assert!(cwd.exists(), "temp dir must exist: {:?}", cwd); - prop_assert!(cwd.is_dir(), "temp dir must be a directory: {:?}", cwd); - - // When both node and runtime are available, spawning "execute" would block - // waiting for the plugin pipeline. Instead we verify the property by - // confirming that run_node_command_captured sets current_dir via the - // PluginRuntimeNotFound path: we use a non-existent runtime path scenario - // by checking the function signature and the source-level guarantee that - // `cmd.current_dir(cwd)` is called before `cmd.output()`. - // - // The structural guarantee is: in run_node_command_captured the line - // cmd.current_dir(cwd); - // appears unconditionally before cmd.output(), so any error from output() - // is never a "cwd was ignored" error. - if node_available() && runtime_available() { - // Verify the function accepts the cwd type without panicking. - // The compile-time type check is the strongest guarantee here. - let _: &std::path::Path = cwd; - // Property holds by construction — current_dir is always set. - return Ok(()); - } - - let result = run_bridge_command("execute", cwd, true, &[]); - - match result { - Ok(_) => { - // Node.js ran successfully in the given cwd — property holds. - } - Err(CliError::NodeNotFound) => { - // Node.js is not installed in this environment. - // The cwd was accepted (passed to Command::current_dir) before the - // NodeNotFound check, so the property still holds. - } - Err(CliError::PluginRuntimeNotFound(_)) => { - // Node.js found but plugin-runtime.mjs is absent. - // Again, cwd was accepted — property holds. - } - Err(CliError::NodeProcessFailed { .. }) => { - // Node.js ran but exited non-zero (e.g. runtime error). - // The process was launched with the correct cwd — property holds. - } - Err(ref err) if is_cwd_io_error(err) => { - // An IoError that looks like a bad working directory — property FAILS. - prop_assert!( - false, - "run_bridge_command returned a cwd-related IoError for an existing \ - directory {:?}: {:?}", - cwd, - err - ); - } - Err(_) => { - // Any other error (ConfigError, SerializationError, non-cwd IoError) - // is unrelated to the working directory — property holds. - } - } - } - } - - /// Deterministic unit test: creates N distinct temp dirs and verifies that - /// run_bridge_command never returns a cwd-related IoError for any of them. - /// - /// Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory - /// Validates: Requirement 5.5 - #[test] - fn bridge_command_accepts_any_existing_directory_as_cwd() { - // Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory - // Validates: Requirement 5.5 - - // Skip actual spawning when both node and runtime are present to avoid blocking. - if node_available() && runtime_available() { - // Structural guarantee: current_dir is set unconditionally in - // run_node_command_captured before cmd.output() is called. - // The property holds by construction. - return; - } - - let dirs: Vec = (0..5) - .map(|_| TempDir::new().expect("failed to create temp dir")) - .collect(); - - for tmp in &dirs { - let cwd = tmp.path(); - assert!(cwd.exists(), "temp dir must exist"); - - let result = run_bridge_command("execute", cwd, true, &[]); - - match result { - Ok(_) - | Err(CliError::NodeNotFound) - | Err(CliError::PluginRuntimeNotFound(_)) - | Err(CliError::NodeProcessFailed { .. }) => { - // All acceptable — cwd was forwarded correctly. - } - Err(ref err) if is_cwd_io_error(err) => { - panic!( - "run_bridge_command returned a cwd-related IoError for existing dir {:?}: {:?}", - cwd, err - ); - } - Err(_) => { - // Other errors are unrelated to cwd — acceptable. - } - } - } - } - - /// Negative test: passing a non-existent path should NOT silently succeed. - /// The error must be either NodeNotFound, PluginRuntimeNotFound, or an IoError - /// (because the OS rejects the non-existent cwd at spawn time). - /// - /// Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory - /// Validates: Requirement 5.5 - #[test] - fn bridge_command_with_nonexistent_cwd_returns_error_not_success() { - // Feature: gui-direct-cli-crate, Property 3: Bridge command respects working directory - // Validates: Requirement 5.5 - let nonexistent = std::path::Path::new("/this/path/does/not/exist/tnmsc_test_8_1"); - assert!(!nonexistent.exists(), "path must not exist for this test"); - - let result = run_bridge_command("execute", nonexistent, true, &[]); - - // Must NOT be Ok — a non-existent cwd should never produce a successful result. - assert!( - result.is_err(), - "run_bridge_command with non-existent cwd must return Err, got Ok" - ); - - // The error must be one of the expected variants — not a silent success. - match result { - Err(CliError::NodeNotFound) => { /* node not installed — acceptable */ } - Err(CliError::PluginRuntimeNotFound(_)) => { /* runtime absent — acceptable */ } - Err(CliError::IoError(_)) => { /* OS rejected the bad cwd — expected */ } - Err(CliError::NodeProcessFailed { .. }) => { /* process ran but failed — acceptable */ - } - Err(other) => { - // ConfigError / SerializationError are unexpected here but not a cwd bug. - // We allow them rather than over-constraining the test. - let _ = other; - } - Ok(_) => unreachable!("already asserted is_err above"), - } - } -} - -// --------------------------------------------------------------------------- -// Cargo workspace configuration validation tests -// --------------------------------------------------------------------------- -#[cfg(test)] -mod cargo_config_tests { - use std::fs; - - fn workspace_root() -> std::path::PathBuf { - let manifest_dir = env!("CARGO_MANIFEST_DIR"); - std::path::Path::new(manifest_dir) - .parent() - .expect("workspace root should exist") - .to_path_buf() - } - - /// Verify cli/Cargo.toml has both [lib] and [[bin]] sections with name = "tnmsc". - #[test] - fn cli_cargo_toml_has_lib_and_bin_targets() { - let cli_toml = workspace_root().join("cli").join("Cargo.toml"); - let content = fs::read_to_string(&cli_toml).expect("cli/Cargo.toml should be readable"); - - assert!( - content.contains("[lib]"), - "cli/Cargo.toml should contain [lib] section" - ); - assert!( - content.contains("[[bin]]"), - "cli/Cargo.toml should contain [[bin]] section" - ); - } - - /// Verify both [lib] and [[bin]] targets use name = "tnmsc". - #[test] - fn cli_cargo_toml_lib_and_bin_crate_name_is_tnmsc() { - let cli_toml = workspace_root().join("cli").join("Cargo.toml"); - let content = fs::read_to_string(&cli_toml).expect("cli/Cargo.toml should be readable"); - - let count = content.matches(r#"name = "tnmsc""#).count(); - assert!( - count >= 2, - "cli/Cargo.toml should have name = \"tnmsc\" for both [lib] and [[bin]], found {} occurrence(s)", - count - ); - } - - /// Verify gui/src-tauri/Cargo.toml contains tnmsc as a workspace dependency. - #[test] - fn gui_cargo_toml_has_tnmsc_workspace_dependency() { - let gui_toml = workspace_root() - .join("gui") - .join("src-tauri") - .join("Cargo.toml"); - let content = - fs::read_to_string(&gui_toml).expect("gui/src-tauri/Cargo.toml should be readable"); - - assert!( - content.contains("tnmsc = { workspace = true }"), - "gui/src-tauri/Cargo.toml should contain `tnmsc = {{ workspace = true }}`" - ); - } - - /// Verify root Cargo.toml declares tnmsc path dependency in [workspace.dependencies]. - #[test] - fn root_cargo_toml_has_tnmsc_workspace_path_dependency() { - let root_toml = workspace_root().join("Cargo.toml"); - let content = fs::read_to_string(&root_toml).expect("root Cargo.toml should be readable"); - - assert!( - content.contains(r#"tnmsc = { path = "cli" }"#), - "root Cargo.toml [workspace.dependencies] should contain `tnmsc = {{ path = \"cli\" }}`" - ); - } -} diff --git a/cli/src/main.rs b/cli/src/main.rs index dc5cbf5b..eccb7bc9 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,4 +1,4 @@ -//! tnmsc — Rust CLI entry point. +//! tnmsc — Rust CLI shell entry point. //! //! Pure Rust commands: help, version, config, config-show //! Bridge commands (Node.js): execute, dry-run, clean, plugins @@ -15,12 +15,10 @@ use cli::{Cli, ResolvedCommand, resolve_command, resolve_log_level}; fn main() -> ExitCode { let cli = Cli::parse(); - // Resolve and set global log level if let Some(level) = resolve_log_level(&cli) { set_global_log_level(level.to_logger_level()); } - // In JSON mode, suppress all log output let json_mode = cli.json; if json_mode { set_global_log_level(tnmsc_logger::LogLevel::Silent); @@ -29,13 +27,10 @@ fn main() -> ExitCode { let command = resolve_command(&cli); match command { - // Pure Rust commands ResolvedCommand::Help => tnmsc::commands::help::execute(), ResolvedCommand::Version => tnmsc::commands::version::execute(), ResolvedCommand::Config(pairs) => tnmsc::commands::config_cmd::execute(&pairs), ResolvedCommand::ConfigShow => tnmsc::commands::config_show::execute(), - - // Bridge commands (delegate to Node.js plugin runtime) ResolvedCommand::Execute => tnmsc::commands::bridge::execute(json_mode), ResolvedCommand::DryRun => tnmsc::commands::bridge::dry_run(json_mode), ResolvedCommand::Clean => tnmsc::commands::bridge::clean(json_mode), diff --git a/cli/src/pipeline/CliArgumentParser.test.ts b/cli/src/pipeline/CliArgumentParser.test.ts deleted file mode 100644 index ad49ff88..00000000 --- a/cli/src/pipeline/CliArgumentParser.test.ts +++ /dev/null @@ -1,9 +0,0 @@ -import {describe, expect, it} from 'vitest' -import {parseArgs, resolveCommand} from './CliArgumentParser' - -describe('cli argument parser', () => { - it('resolves the init subcommand to InitCommand', () => { - const command = resolveCommand(parseArgs(['init'])) - expect(command.name).toBe('init') - }) -}) diff --git a/cli/src/pipeline/CliArgumentParser.ts b/cli/src/pipeline/CliArgumentParser.ts deleted file mode 100644 index ac5c1b60..00000000 --- a/cli/src/pipeline/CliArgumentParser.ts +++ /dev/null @@ -1,265 +0,0 @@ -/** - * CLI Argument Parser Module - * Handles extraction and parsing of command-line arguments - * - * Refactored to use Command Factory pattern for command creation - */ - -import type {Command} from '@/commands/Command' -import {FactoryPriority} from '@/commands/CommandFactory' -import {CommandRegistry} from '@/commands/CommandRegistry' -import {CleanCommandFactory} from '@/commands/factories/CleanCommandFactory' -import {ConfigCommandFactory} from '@/commands/factories/ConfigCommandFactory' -import {DryRunCommandFactory} from '@/commands/factories/DryRunCommandFactory' -import {ExecuteCommandFactory} from '@/commands/factories/ExecuteCommandFactory' -import {HelpCommandFactory} from '@/commands/factories/HelpCommandFactory' -import {InitCommandFactory} from '@/commands/factories/InitCommandFactory' -import {PluginsCommandFactory} from '@/commands/factories/PluginsCommandFactory' -import {UnknownCommandFactory} from '@/commands/factories/UnknownCommandFactory' -import {VersionCommandFactory} from '@/commands/factories/VersionCommandFactory' - -/** - * Valid subcommands for the CLI - */ -export type Subcommand = 'help' | 'version' | 'init' | 'dry-run' | 'clean' | 'config' | 'plugins' - -/** - * Valid log levels for the CLI - */ -export type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' - -/** - * Command line argument parsing result - */ -export interface ParsedCliArgs { - readonly subcommand: Subcommand | undefined - readonly helpFlag: boolean - readonly versionFlag: boolean - readonly dryRun: boolean - readonly jsonFlag: boolean - readonly showFlag: boolean - readonly logLevel: LogLevel | undefined - readonly setOption: readonly [key: string, value: string][] - readonly unknownCommand: string | undefined - readonly positional: readonly string[] - readonly unknown: readonly string[] -} - -/** - * Valid subcommands set for quick lookup - */ -const VALID_SUBCOMMANDS: ReadonlySet = new Set(['help', 'version', 'init', 'dry-run', 'clean', 'config', 'plugins']) - -/** - * Log level flags mapping - */ -const LOG_LEVEL_FLAGS: ReadonlyMap = new Map([ - ['--trace', 'trace'], - ['--debug', 'debug'], - ['--info', 'info'], - ['--warn', 'warn'], - ['--error', 'error'] -]) - -/** - * Log level priority map (lower number = more verbose) - */ -const LOG_LEVEL_PRIORITY: ReadonlyMap = new Map([ - ['trace', 0], - ['debug', 1], - ['info', 2], - ['warn', 3], - ['error', 4] -]) - -/** - * Extract actual user arguments from argv - * Compatible with various execution scenarios: npx, node, tsx, direct execution, etc. - */ -export function extractUserArgs(argv: readonly string[]): string[] { - const args = [...argv] - - const first = args[0] // Skip runtime path (node, bun, deno, etc.) - if (first != null && isRuntimeExecutable(first)) args.shift() - - const second = args[0] // Skip script path or npx package name - if (second != null && isScriptOrPackage(second)) args.shift() - - return args -} - -/** - * Determine if it is a runtime executable - */ -function isRuntimeExecutable(arg: string): boolean { - const runtimes = ['node', 'nodejs', 'bun', 'deno', 'tsx', 'ts-node', 'npx', 'pnpx', 'yarn', 'pnpm'] - const normalized = arg.toLowerCase().replaceAll('\\', '/') - return runtimes.some(rt => { - const pattern = new RegExp(`(?:^|/)${rt}(?:\\.exe|\\.cmd|\\.ps1)?$`, 'i') - return pattern.test(normalized) || normalized === rt - }) -} - -/** - * Determine if it is a script file or package name - */ -function isScriptOrPackage(arg: string): boolean { - if (/\.(?:m?[jt]s|cjs)$/.test(arg)) return true // Script file - if (/[/\\]/.test(arg) && !arg.startsWith('-')) return true // File path containing separators - return /^(?:@[\w-]+\/)?[\w-]+$/.test(arg) && !arg.startsWith('-') // npx executed package name -} - -function pickMoreVerbose(current: LogLevel | undefined, candidate: LogLevel): LogLevel { - if (current == null) return candidate - const currentPriority = LOG_LEVEL_PRIORITY.get(current) ?? 4 - const candidatePriority = LOG_LEVEL_PRIORITY.get(candidate) ?? 4 - return candidatePriority < currentPriority ? candidate : current -} - -/** - * Parse command line arguments into structured result - */ -export function parseArgs(args: readonly string[]): ParsedCliArgs { - const result: { - subcommand: Subcommand | undefined - helpFlag: boolean - versionFlag: boolean - dryRun: boolean - jsonFlag: boolean - showFlag: boolean - logLevel: LogLevel | undefined - setOption: [key: string, value: string][] - unknownCommand: string | undefined - positional: string[] - unknown: string[] - } = { - subcommand: void 0, - helpFlag: false, - versionFlag: false, - dryRun: false, - jsonFlag: false, - showFlag: false, - logLevel: void 0, - setOption: [], - unknownCommand: void 0, - positional: [], - unknown: [] - } - - let firstPositionalProcessed = false - - for (let i = 0; i < args.length; i++) { - const arg = args[i] - if (arg == null) continue - - if (arg === '--') { // Handle -- separator: all following args are positional - result.positional.push(...args.slice(i + 1).filter((a): a is string => a != null)) - break - } - - if (arg.startsWith('--')) { // Long options - const parts = arg.split('=') - const key = parts[0] ?? '' - - const logLevel = LOG_LEVEL_FLAGS.get(key) // Check log level flags - if (logLevel != null) { - result.logLevel = pickMoreVerbose(result.logLevel, logLevel) - continue - } - - switch (key) { - case '--help': result.helpFlag = true; break - case '--version': result.versionFlag = true; break - case '--dry-run': result.dryRun = true; break - case '--json': result.jsonFlag = true; break - case '--show': result.showFlag = true; break - case '--set': - if (parts.length > 1) { // Parse --set key=value from next arg or from = syntax - const keyValue = parts.slice(1).join('=') - const eqIndex = keyValue.indexOf('=') - if (eqIndex > 0) result.setOption.push([keyValue.slice(0, eqIndex), keyValue.slice(eqIndex + 1)]) - } else { - const nextArg = args[i + 1] // Next arg is the value - if (nextArg != null) { - const eqIndex = nextArg.indexOf('=') - if (eqIndex > 0) { - result.setOption.push([nextArg.slice(0, eqIndex), nextArg.slice(eqIndex + 1)]) - i++ // Skip next arg - } - } - } - break - default: result.unknown.push(arg) - } - continue - } - - if (arg.startsWith('-') && arg.length > 1) { // Short options - const flags = arg.slice(1) - for (const flag of flags) { - switch (flag) { - case 'h': result.helpFlag = true; break - case 'v': result.versionFlag = true; break - case 'n': result.dryRun = true; break - case 'j': result.jsonFlag = true; break - default: result.unknown.push(`-${flag}`) - } - } - continue - } - - if (!firstPositionalProcessed) { // First positional argument: check if it's a subcommand - firstPositionalProcessed = true - if (VALID_SUBCOMMANDS.has(arg)) result.subcommand = arg as Subcommand - else { - result.unknownCommand = arg // Unknown first positional is captured as unknownCommand - } - continue - } - - result.positional.push(arg) // Remaining positional arguments - } - - return result -} - -/** - * Singleton instance of the command registry - * Lazy-loaded to ensure factories are only created when needed - */ -let commandRegistry: ReturnType | undefined - -function createDefaultCommandRegistry(): CommandRegistry { - const registry = new CommandRegistry() - - registry.register(new VersionCommandFactory()) // High priority: flag-based commands - registry.register(new HelpCommandFactory()) - registry.register(new UnknownCommandFactory()) - - registry.registerWithPriority(new InitCommandFactory(), FactoryPriority.Subcommand) - registry.registerWithPriority(new DryRunCommandFactory(), FactoryPriority.Subcommand) - registry.registerWithPriority(new CleanCommandFactory(), FactoryPriority.Subcommand) - registry.registerWithPriority(new PluginsCommandFactory(), FactoryPriority.Subcommand) - registry.registerWithPriority(new ConfigCommandFactory(), FactoryPriority.Subcommand) - - registry.registerWithPriority(new ExecuteCommandFactory(), FactoryPriority.Subcommand) // Lowest priority: default/catch-all command - - return registry -} - -/** - * Get or create the command registry singleton - */ -function getCommandRegistry(): ReturnType { - commandRegistry ??= createDefaultCommandRegistry() - return commandRegistry -} - -/** - * Resolve command from parsed CLI arguments using factory pattern - * Delegates command creation to registered factories based on priority - */ -export function resolveCommand(args: ParsedCliArgs): Command { - const registry = getCommandRegistry() - return registry.resolve(args) -} diff --git a/cli/src/pipeline/ContextMerger.ts b/cli/src/pipeline/ContextMerger.ts deleted file mode 100644 index cf7dbd97..00000000 --- a/cli/src/pipeline/ContextMerger.ts +++ /dev/null @@ -1,207 +0,0 @@ -/** - * Context Merger Module - * Handles merging of partial InputCollectedContext objects - */ - -import type {InputCollectedContext, Project, Workspace} from '../plugins/plugin-core' - -/** - * Merge strategy types for context fields - */ -type MergeStrategy = 'concat' | 'override' | 'mergeProjects' - -/** - * Field merge configuration - */ -interface FieldConfig { - readonly strategy: MergeStrategy - readonly getter: (ctx: Partial) => T | undefined -} - -/** - * Merge configuration for all InputCollectedContext fields - */ -const FIELD_CONFIGS: Record> = { - workspace: { - strategy: 'mergeProjects', - getter: ctx => ctx.workspace - }, - vscodeConfigFiles: { - strategy: 'concat', - getter: ctx => ctx.vscodeConfigFiles - }, - zedConfigFiles: { - strategy: 'concat', - getter: ctx => ctx.zedConfigFiles - }, - jetbrainsConfigFiles: { - strategy: 'concat', - getter: ctx => ctx.jetbrainsConfigFiles - }, - editorConfigFiles: { - strategy: 'concat', - getter: ctx => ctx.editorConfigFiles - }, - commands: { - strategy: 'concat', - getter: ctx => ctx.commands - }, - subAgents: { - strategy: 'concat', - getter: ctx => ctx.subAgents - }, - skills: { - strategy: 'concat', - getter: ctx => ctx.skills - }, - rules: { - strategy: 'concat', - getter: ctx => ctx.rules - }, - aiAgentIgnoreConfigFiles: { - strategy: 'concat', - getter: ctx => ctx.aiAgentIgnoreConfigFiles - }, - readmePrompts: { - strategy: 'concat', - getter: ctx => ctx.readmePrompts - }, - globalMemory: { - // Override fields (last one wins) - strategy: 'override', - getter: ctx => ctx.globalMemory - }, - aindexDir: { - strategy: 'override', - getter: ctx => ctx.aindexDir - }, - globalGitIgnore: { - strategy: 'override', - getter: ctx => ctx.globalGitIgnore - }, - shadowGitExclude: { - strategy: 'override', - getter: ctx => ctx.shadowGitExclude - } -} as const - -/** - * Merge two arrays by concatenating them - */ -function mergeArrays( - base: readonly T[] | undefined, - addition: readonly T[] | undefined -): readonly T[] { - if (addition == null) return base ?? [] - if (base == null) return addition - return [...base, ...addition] -} - -/** - * Merge workspace projects. Later projects with the same name replace earlier ones. - */ -function buildProjectMergeKey(project: Project): string { - if (project.isWorkspaceRootProject === true) return `workspace-root:${project.name ?? ''}` - - const promptSeries = project.promptSeries ?? 'workspace' - return `${promptSeries}:${project.name ?? ''}` -} - -function mergeWorkspaceProjects( - base: Workspace, - addition: Workspace -): Workspace { - const projectMap = new Map() - for (const project of base.projects) projectMap.set(buildProjectMergeKey(project), project) - for (const project of addition.projects) - { projectMap.set(buildProjectMergeKey(project), project) } - return { - directory: addition.directory ?? base.directory, - projects: [...projectMap.values()] - } -} - -/** - * Merge workspace fields - */ -function mergeWorkspace( - base: Workspace | undefined, - addition: Workspace | undefined -): Workspace | undefined { - if (addition == null) return base - if (base == null) return addition - return mergeWorkspaceProjects(base, addition) -} - -/** - * Merge a single field based on its strategy - */ -function mergeField( - base: T | undefined, - addition: T | undefined, - strategy: MergeStrategy -): T | undefined { - switch (strategy) { - case 'concat': - return mergeArrays( - base as unknown[], - addition as unknown[] - ) as unknown as T - case 'override': - return addition ?? base - case 'mergeProjects': - return mergeWorkspace( - base as unknown as Workspace, - addition as unknown as Workspace - ) as unknown as T - default: - return addition ?? base - } -} - -/** - * Merge two partial InputCollectedContext objects - * Uses configuration-driven approach to reduce code duplication - */ -export function mergeContexts( - base: Partial, - addition: Partial -): Partial { - const result: Record = {} - - for (const [fieldName, config] of Object.entries(FIELD_CONFIGS)) { - // Process each configured field - const baseValue = config.getter(base) - const additionValue = config.getter(addition) - const mergedValue = mergeField(baseValue, additionValue, config.strategy) - if (mergedValue != null) result[fieldName] = mergedValue - } - - return result as Partial -} - -/** - * Build dependency context from plugin outputs - */ -export function buildDependencyContext( - plugin: {dependsOn?: readonly string[]}, - outputsByPlugin: Map>, - mergeFn: ( - base: Partial, - addition: Partial - ) => Partial -): Partial { - const deps = plugin.dependsOn ?? [] - if (deps.length === 0) return {} - - const visited = new Set() - let merged: Partial = {} - for (const depName of deps) { - if (visited.has(depName)) continue - visited.add(depName) - const depOutput = outputsByPlugin.get(depName) - if (depOutput != null) merged = mergeFn(merged, depOutput) - } - - return merged -} diff --git a/cli/src/pipeline/DependencyResolver.ts b/cli/src/pipeline/DependencyResolver.ts deleted file mode 100644 index 36a5ef38..00000000 --- a/cli/src/pipeline/DependencyResolver.ts +++ /dev/null @@ -1,136 +0,0 @@ -/** - * Dependency Resolver Module - * Handles dependency graph building, validation, and topological sorting - */ - -import type {DependencyNode} from '../plugins/plugin-core' -import {CircularDependencyError, MissingDependencyError} from '../plugins/plugin-core' - -/** - * Find cycle path in dependency graph for error reporting - */ -function findCyclePath( - nodes: readonly T[], - inDegree: Map -): string[] { - const cycleNodes = new Set() // Find nodes that are part of a cycle (in-degree > 0) - for (const [name, degree] of inDegree) { - if (degree > 0) cycleNodes.add(name) - } - - const deps = new Map() // Build dependency map for cycle nodes - for (const node of nodes) { - if (cycleNodes.has(node.name)) { - const nodeDeps = (node.dependsOn ?? []).filter(d => cycleNodes.has(d)) - deps.set(node.name, nodeDeps) - } - } - - const visited = new Set() // DFS to find cycle path - const path: string[] = [] - - const dfs = (node: string): boolean => { - if (path.includes(node)) { - path.push(node) // Found cycle, add closing node to complete the cycle - return true - } - if (visited.has(node)) return false - - visited.add(node) - path.push(node) - - for (const dep of deps.get(node) ?? []) { - if (dfs(dep)) return true - } - - path.pop() - return false - } - - for (const node of cycleNodes) { // Start DFS from any cycle node - if (dfs(node)) { - const lastNode = path.at(-1) - if (lastNode == null) return [...cycleNodes] - const cycleStart = path.indexOf(lastNode) // Extract just the cycle portion - return path.slice(cycleStart) - } - visited.clear() - path.length = 0 - } - - return [...cycleNodes] // Fallback: return all cycle nodes -} - -/** - * Topologically sort dependency nodes based on dependencies. - * Uses Kahn's algorithm with registration order preservation. - */ -export function topologicalSort( - nodes: readonly T[] -): T[] { - const nodeNames = new Set(nodes.map(node => node.name)) // Validate dependencies first - for (const node of nodes) { - const deps = node.dependsOn ?? [] - for (const dep of deps) { - if (!nodeNames.has(dep)) throw new MissingDependencyError(node.name, dep) - } - } - - const nodeMap = new Map() // Build node map for quick lookup - for (const node of nodes) nodeMap.set(node.name, node) - - const inDegree = new Map() // Build in-degree map (count of incoming edges) - for (const node of nodes) inDegree.set(node.name, 0) - - const dependents = new Map() // Build adjacency list (dependents for each node) - for (const node of nodes) dependents.set(node.name, []) - - for (const node of nodes) { // Populate in-degree and dependents - const deps = node.dependsOn ?? [] - for (const dep of deps) { - inDegree.set(node.name, (inDegree.get(node.name) ?? 0) + 1) // Increment in-degree for current node - const depList = dependents.get(dep) ?? [] // Add current node as dependent of dep - depList.push(node.name) - dependents.set(dep, depList) - } - } - - const queue: string[] = [] // Use registration order for initial queue // Initialize queue with nodes that have no dependencies (in-degree = 0) - for (const node of nodes) { - if (inDegree.get(node.name) === 0) queue.push(node.name) - } - - const result: T[] = [] // Process queue - const nodeIndexMap = new Map() // Pre-compute node indices for O(1) lookup - fixes O(n²) complexity - for (let i = 0; i < nodes.length; i++) { - const node = nodes[i] - if (node != null) nodeIndexMap.set(node.name, i) - } - - while (queue.length > 0) { - const current = queue.shift() // Take first element to preserve registration order - if (current == null) continue - - const node = nodeMap.get(current) - if (node == null) continue - result.push(node) - - const currentDependents = dependents.get(current) ?? [] // Process dependents in registration order - const sortedDependents = currentDependents.sort((a, b) => { // Sort dependents by their original registration order - const indexA = nodeIndexMap.get(a) ?? -1 - const indexB = nodeIndexMap.get(b) ?? -1 - return indexA - indexB - }) - - for (const dependent of sortedDependents) { - const newDegree = (inDegree.get(dependent) ?? 0) - 1 - inDegree.set(dependent, newDegree) - if (newDegree === 0) queue.push(dependent) - } - } - - if (result.length === nodes.length) return result // Check for cycle: if not all nodes are in result, there's a cycle - - const cyclePath = findCyclePath(nodes, inDegree) - throw new CircularDependencyError(cyclePath) -} diff --git a/cli/src/pipeline/OutputRuntimeTargets.ts b/cli/src/pipeline/OutputRuntimeTargets.ts deleted file mode 100644 index 0f9aa71b..00000000 --- a/cli/src/pipeline/OutputRuntimeTargets.ts +++ /dev/null @@ -1,57 +0,0 @@ -import type {ILogger, OutputRuntimeTargets} from '@/plugins/plugin-core' - -import * as fs from 'node:fs' -import * as path from 'node:path' -import {getPlatformFixedDir} from '@/core/desk-paths' -import {buildFileOperationDiagnostic} from '@/diagnostics' - -const JETBRAINS_VENDOR_DIR = 'JetBrains' -const JETBRAINS_AIA_DIR = 'aia' -const JETBRAINS_CODEX_DIR = 'codex' -const SUPPORTED_JETBRAINS_IDE_DIR_PREFIXES = [ - 'IntelliJIdea', - 'WebStorm', - 'RustRover', - 'PyCharm', - 'PyCharmCE', - 'PhpStorm', - 'GoLand', - 'CLion', - 'DataGrip', - 'RubyMine', - 'Rider', - 'DataSpell', - 'Aqua' -] as const - -function isSupportedJetBrainsIdeDir(dirName: string): boolean { - return SUPPORTED_JETBRAINS_IDE_DIR_PREFIXES.some(prefix => dirName.startsWith(prefix)) -} - -function discoverJetBrainsCodexDirs(logger: ILogger): readonly string[] { - const baseDir = path.join(getPlatformFixedDir(), JETBRAINS_VENDOR_DIR) - - try { - const dirents = fs.readdirSync(baseDir, {withFileTypes: true}) - return dirents - .filter(dirent => dirent.isDirectory() && isSupportedJetBrainsIdeDir(dirent.name)) - .map(dirent => path.join(baseDir, dirent.name, JETBRAINS_AIA_DIR, JETBRAINS_CODEX_DIR)) - } - catch (error) { - logger.debug(buildFileOperationDiagnostic({ - code: 'JETBRAINS_CODEX_DIRECTORY_SCAN_SKIPPED', - title: 'JetBrains Codex directories are unavailable', - operation: 'scan', - targetKind: 'JetBrains IDE directory', - path: baseDir, - error - })) - return [] - } -} - -export function discoverOutputRuntimeTargets(logger: ILogger): OutputRuntimeTargets { - return { - jetbrainsCodexDirs: discoverJetBrainsCodexDirs(logger) - } -} diff --git a/cli/src/plugin-runtime.ts b/cli/src/plugin-runtime.ts deleted file mode 100644 index c23b0cf8..00000000 --- a/cli/src/plugin-runtime.ts +++ /dev/null @@ -1,128 +0,0 @@ -import type {OutputCleanContext, OutputWriteContext} from './plugins/plugin-core' -/** - * Plugin Runtime Entry Point - * - * Streamlined entry for the Rust CLI binary to spawn via Node.js. - * Accepts a subcommand and flags, executes the plugin pipeline, - * and outputs results to stdout. - * - * Usage: node plugin-runtime.mjs [--json] [--dry-run] - * - * Subcommands: execute, dry-run, clean, plugins - */ -import type {Command, CommandContext} from '@/commands/Command' -import type {PipelineConfig} from '@/config' -import process from 'node:process' -import {CleanCommand} from '@/commands/CleanCommand' -import {DryRunCleanCommand} from '@/commands/DryRunCleanCommand' -import {DryRunOutputCommand} from '@/commands/DryRunOutputCommand' -import {ExecuteCommand} from '@/commands/ExecuteCommand' -import {JsonOutputCommand, toJsonCommandResult} from '@/commands/JsonOutputCommand' -import {PluginsCommand} from '@/commands/PluginsCommand' -import {buildUnhandledExceptionDiagnostic} from '@/diagnostics' -import {discoverOutputRuntimeTargets} from '@/pipeline/OutputRuntimeTargets' -import {createDefaultPluginConfig} from './plugin.config' -import {createLogger, drainBufferedDiagnostics, setGlobalLogLevel} from './plugins/plugin-core' - -/** - * Parse runtime arguments. - * Expected: node plugin-runtime.mjs [--json] [--dry-run] - */ -function parseRuntimeArgs(argv: string[]): {subcommand: string, json: boolean, dryRun: boolean} { - const args = argv.slice(2) // Skip node and script path - let subcommand = 'execute' - let json = false - let dryRun = false - - for (const arg of args) { - if (arg === '--json' || arg === '-j') json = true - else if (arg === '--dry-run' || arg === '-n') dryRun = true - else if (!arg.startsWith('-')) subcommand = arg - } - - return {subcommand, json, dryRun} -} - -/** - * Resolve command from subcommand string. - */ -function resolveRuntimeCommand(subcommand: string, dryRun: boolean): Command { - switch (subcommand) { - case 'execute': return new ExecuteCommand() - case 'dry-run': return new DryRunOutputCommand() - case 'clean': return dryRun ? new DryRunCleanCommand() : new CleanCommand() - case 'plugins': return new PluginsCommand() - default: return new ExecuteCommand() - } -} - -async function main(): Promise { - const {subcommand, json, dryRun} = parseRuntimeArgs(process.argv) - - if (json) setGlobalLogLevel('silent') - - const userPluginConfig: PipelineConfig = await createDefaultPluginConfig(process.argv) - - let command = resolveRuntimeCommand(subcommand, dryRun) - - if (json) { - const selfJsonCommands = new Set(['plugins']) - if (!selfJsonCommands.has(command.name)) command = new JsonOutputCommand(command) - } - - const {context, outputPlugins, userConfigOptions} = userPluginConfig - const logger = createLogger('PluginRuntime') - const runtimeTargets = discoverOutputRuntimeTargets(logger) - - const createCleanContext = (dry: boolean): OutputCleanContext => ({ - logger, - collectedOutputContext: context, - pluginOptions: userConfigOptions, - runtimeTargets, - dryRun: dry - }) - - const createWriteContext = (dry: boolean): OutputWriteContext => ({ - logger, - collectedOutputContext: context, - pluginOptions: userConfigOptions, - runtimeTargets, - dryRun: dry, - registeredPluginNames: Array.from(outputPlugins, plugin => plugin.name) - }) - - const commandCtx: CommandContext = { - logger, - outputPlugins: [...outputPlugins], - collectedOutputContext: context, - userConfigOptions, - createCleanContext, - createWriteContext - } - - const result = await command.execute(commandCtx) - if (!result.success) process.exit(1) -} - -function writeJsonFailure(error: unknown): void { - const errorMessage = error instanceof Error ? error.message : String(error) - const logger = createLogger('plugin-runtime', 'silent') - logger.error(buildUnhandledExceptionDiagnostic('plugin-runtime', error)) - process.stdout.write(`${JSON.stringify(toJsonCommandResult({ - success: false, - filesAffected: 0, - dirsAffected: 0, - message: errorMessage - }, drainBufferedDiagnostics()))}\n`) -} - -main().catch((e: unknown) => { - const {json} = parseRuntimeArgs(process.argv) - if (json) { - writeJsonFailure(e) - process.exit(1) - } - const logger = createLogger('plugin-runtime', 'error') - logger.error(buildUnhandledExceptionDiagnostic('plugin-runtime', e)) - process.exit(1) -}) diff --git a/cli/src/plugin.config.ts b/cli/src/plugin.config.ts deleted file mode 100644 index 8d0dd887..00000000 --- a/cli/src/plugin.config.ts +++ /dev/null @@ -1,58 +0,0 @@ -import type {PipelineConfig} from '@/config' -import process from 'node:process' -import {GenericSkillsOutputPlugin} from '@truenine/plugin-agentskills-compact' -import {AgentsOutputPlugin} from '@truenine/plugin-agentsmd' -import {ClaudeCodeCLIOutputPlugin} from '@truenine/plugin-claude-code-cli' -import {CursorOutputPlugin} from '@truenine/plugin-cursor' -import {DroidCLIOutputPlugin} from '@truenine/plugin-droid-cli' -import {EditorConfigOutputPlugin} from '@truenine/plugin-editorconfig' -import {GeminiCLIOutputPlugin} from '@truenine/plugin-gemini-cli' -import {GitExcludeOutputPlugin} from '@truenine/plugin-git-exclude' -import {JetBrainsAIAssistantCodexOutputPlugin} from '@truenine/plugin-jetbrains-ai-codex' -import {JetBrainsIDECodeStyleConfigOutputPlugin} from '@truenine/plugin-jetbrains-codestyle' -import {CodexCLIOutputPlugin} from '@truenine/plugin-openai-codex-cli' -import {OpencodeCLIOutputPlugin} from '@truenine/plugin-opencode-cli' -import {QoderIDEPluginOutputPlugin} from '@truenine/plugin-qoder-ide' -import {ReadmeMdConfigFileOutputPlugin} from '@truenine/plugin-readme' -import {TraeIDEOutputPlugin} from '@truenine/plugin-trae-ide' -import {VisualStudioCodeIDEConfigOutputPlugin} from '@truenine/plugin-vscode' -import {WarpIDEOutputPlugin} from '@truenine/plugin-warp-ide' -import {WindsurfOutputPlugin} from '@truenine/plugin-windsurf' -import {ZedIDEConfigOutputPlugin} from '@truenine/plugin-zed' -import {defineConfig} from '@/config' -import {TraeCNIDEOutputPlugin} from '@/plugins/plugin-trae-cn-ide' - -export async function createDefaultPluginConfig( - pipelineArgs: readonly string[] = process.argv -): Promise { - return defineConfig({ - pipelineArgs, - pluginOptions: { - plugins: [ - new AgentsOutputPlugin(), - new ClaudeCodeCLIOutputPlugin(), - new CodexCLIOutputPlugin(), - new JetBrainsAIAssistantCodexOutputPlugin(), - new DroidCLIOutputPlugin(), - new GeminiCLIOutputPlugin(), - new GenericSkillsOutputPlugin(), - new OpencodeCLIOutputPlugin(), - new QoderIDEPluginOutputPlugin(), - new TraeIDEOutputPlugin(), - new TraeCNIDEOutputPlugin(), - new WarpIDEOutputPlugin(), - new WindsurfOutputPlugin(), - new CursorOutputPlugin(), - new GitExcludeOutputPlugin(), - - new JetBrainsIDECodeStyleConfigOutputPlugin(), - new EditorConfigOutputPlugin(), - new VisualStudioCodeIDEConfigOutputPlugin(), - new ZedIDEConfigOutputPlugin(), - new ReadmeMdConfigFileOutputPlugin() - ] - } - }) -} - -export default createDefaultPluginConfig diff --git a/cli/src/plugins/AbstractOutputPlugin.test.ts b/cli/src/plugins/AbstractOutputPlugin.test.ts deleted file mode 100644 index 8c6174c8..00000000 --- a/cli/src/plugins/AbstractOutputPlugin.test.ts +++ /dev/null @@ -1,122 +0,0 @@ -import type {OutputWriteContext, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt} from './plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {AbstractOutputPlugin, createLogger, FilePathKind, PromptKind} from './plugin-core' - -class TestDefaultPromptOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('TestDefaultPromptOutputPlugin', { - outputFileName: 'TEST.md', - treatWorkspaceRootProjectAsProject: true - }) - } -} - -function createRootPrompt(content: string): ProjectRootMemoryPrompt { - return { - type: PromptKind.ProjectRootMemory, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Root, - path: '', - getDirectoryName: () => '' - }, - markdownContents: [] - } as ProjectRootMemoryPrompt -} - -function createChildPrompt( - workspaceBase: string, - projectName: string, - relativePath: string, - content: string -): ProjectChildrenMemoryPrompt { - return { - type: PromptKind.ProjectChildrenMemory, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - markdownContents: [], - dir: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: path.join(workspaceBase, projectName), - getDirectoryName: () => path.basename(relativePath), - getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) - }, - workingChildDirectoryPath: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: path.join(workspaceBase, projectName), - getDirectoryName: () => path.basename(relativePath), - getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) - } - } as ProjectChildrenMemoryPrompt -} - -describe('abstractOutputPlugin prompt-source project exclusion', () => { - it('skips prompt-source projects and still writes synthetic workspace root prompts through the default builder', async () => { - const plugin = new TestDefaultPromptOutputPlugin() - const workspaceBase = path.resolve('tmp/abstract-output-plugin') - const ctx = { - logger: createLogger('TestDefaultPromptOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [ - { - name: '__workspace__', - isWorkspaceRootProject: true, - rootMemoryPrompt: createRootPrompt('workspace root') - }, - { - name: 'aindex', - isPromptSourceProject: true, - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'aindex', - basePath: workspaceBase, - getDirectoryName: () => 'aindex', - getAbsolutePath: () => path.join(workspaceBase, 'aindex') - }, - rootMemoryPrompt: createRootPrompt('prompt-source root'), - childMemoryPrompts: [createChildPrompt(workspaceBase, 'aindex', 'commands', 'prompt-source child')] - }, - { - name: 'project-a', - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceBase, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceBase, 'project-a') - }, - rootMemoryPrompt: createRootPrompt('project root'), - childMemoryPrompts: [createChildPrompt(workspaceBase, 'project-a', 'commands', 'project child')] - } - ] - } - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toContain(path.join(workspaceBase, 'TEST.md')) - expect(paths).toContain(path.join(workspaceBase, 'project-a', 'TEST.md')) - expect(paths).toContain(path.join(workspaceBase, 'project-a', 'commands', 'TEST.md')) - expect(paths).not.toContain(path.join(workspaceBase, 'aindex', 'TEST.md')) - expect(paths).not.toContain(path.join(workspaceBase, 'aindex', 'commands', 'TEST.md')) - }) -}) diff --git a/cli/src/plugins/AgentsOutputPlugin.test.ts b/cli/src/plugins/AgentsOutputPlugin.test.ts deleted file mode 100644 index b9d9ffab..00000000 --- a/cli/src/plugins/AgentsOutputPlugin.test.ts +++ /dev/null @@ -1,124 +0,0 @@ -import type {OutputWriteContext, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt} from './plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {AgentsOutputPlugin} from './AgentsOutputPlugin' -import {createLogger, FilePathKind, PromptKind} from './plugin-core' - -function createRootPrompt(content: string): ProjectRootMemoryPrompt { - return { - type: PromptKind.ProjectRootMemory, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Root, - path: '', - getDirectoryName: () => '' - }, - markdownContents: [] - } as ProjectRootMemoryPrompt -} - -function createChildPrompt( - workspaceBase: string, - projectName: string, - relativePath: string, - content: string -): ProjectChildrenMemoryPrompt { - return { - type: PromptKind.ProjectChildrenMemory, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - markdownContents: [], - dir: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: path.join(workspaceBase, projectName), - getDirectoryName: () => path.basename(relativePath), - getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) - }, - workingChildDirectoryPath: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: path.join(workspaceBase, projectName), - getDirectoryName: () => path.basename(relativePath), - getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) - } - } as ProjectChildrenMemoryPrompt -} - -describe('agentsOutputPlugin prompt-source project exclusion', () => { - it('skips prompt-source project files and still writes the synthetic workspace root prompt', async () => { - const plugin = new AgentsOutputPlugin() - const workspaceBase = path.resolve('tmp/agents-plugin') - const ctx = { - logger: createLogger('AgentsOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [ - { - name: '__workspace__', - isWorkspaceRootProject: true, - rootMemoryPrompt: createRootPrompt('workspace root') - }, - { - name: 'aindex', - isPromptSourceProject: true, - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'aindex', - basePath: workspaceBase, - getDirectoryName: () => 'aindex', - getAbsolutePath: () => path.join(workspaceBase, 'aindex') - }, - rootMemoryPrompt: createRootPrompt('prompt-source root'), - childMemoryPrompts: [createChildPrompt(workspaceBase, 'aindex', 'commands', 'prompt-source child')] - }, - { - name: 'project-a', - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceBase, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceBase, 'project-a') - }, - rootMemoryPrompt: createRootPrompt('project root'), - childMemoryPrompts: [createChildPrompt(workspaceBase, 'project-a', 'commands', 'project child')] - } - ] - } - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const paths = declarations.map(declaration => declaration.path) - const workspaceDeclaration = declarations.find(declaration => declaration.path === path.join(workspaceBase, 'AGENTS.md')) - const rootDeclaration = declarations.find(declaration => declaration.path === path.join(workspaceBase, 'project-a', 'AGENTS.md')) - const childDeclaration = declarations.find(declaration => declaration.path === path.join(workspaceBase, 'project-a', 'commands', 'AGENTS.md')) - - expect(paths).toContain(path.join(workspaceBase, 'AGENTS.md')) - expect(paths).toContain(path.join(workspaceBase, 'project-a', 'AGENTS.md')) - expect(paths).toContain(path.join(workspaceBase, 'project-a', 'commands', 'AGENTS.md')) - expect(paths).not.toContain(path.join(workspaceBase, 'aindex', 'AGENTS.md')) - expect(paths).not.toContain(path.join(workspaceBase, 'aindex', 'commands', 'AGENTS.md')) - if (workspaceDeclaration == null || rootDeclaration == null || childDeclaration == null) { - throw new Error('Expected AGENTS.md declarations were not emitted') - } - - await expect(plugin.convertContent(workspaceDeclaration, ctx)).resolves.toBe('workspace root') - await expect(plugin.convertContent(rootDeclaration, ctx)).resolves.toBe('project root') - await expect(plugin.convertContent(childDeclaration, ctx)).resolves.toBe('project child') - }) -}) diff --git a/cli/src/plugins/AgentsOutputPlugin.ts b/cli/src/plugins/AgentsOutputPlugin.ts deleted file mode 100644 index 1ae9ab13..00000000 --- a/cli/src/plugins/AgentsOutputPlugin.ts +++ /dev/null @@ -1,127 +0,0 @@ -import type { - OutputCleanContext, - OutputCleanupDeclarations, - OutputFileDeclaration, - OutputWriteContext -} from './plugin-core' -import {AbstractOutputPlugin} from './plugin-core' - -const PROJECT_MEMORY_FILE = 'AGENTS.md' - -export class AgentsOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('AgentsOutputPlugin', { - outputFileName: PROJECT_MEMORY_FILE, - treatWorkspaceRootProjectAsProject: true, - capabilities: { - prompt: { - scopes: ['project'], - singleScope: false - } - } - }) - } - - override async declareCleanupPaths( - ctx: OutputCleanContext - ): Promise { - const declarations = await super.declareCleanupPaths(ctx) - const promptSourceProjects - = ctx.collectedOutputContext.workspace.projects.filter( - project => project.isPromptSourceProject === true - ) - const promptSourceExcludeGlobs = promptSourceProjects - .map(project => project.dirFromWorkspacePath) - .filter((dir): dir is NonNullable => dir != null) - .map(dir => this.resolvePath(dir.basePath, dir.path, '**')) - - return { - ...declarations, - delete: [ - ...declarations.delete ?? [], - ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) - ], - excludeScanGlobs: [ - ...declarations.excludeScanGlobs ?? [], - ...promptSourceExcludeGlobs - ] - } - } - - override async declareOutputFiles( - ctx: OutputWriteContext - ): Promise { - const results: OutputFileDeclaration[] = [] - const promptProjects = this.getProjectPromptOutputProjects(ctx) - const activePromptScopes = new Set( - this.selectPromptScopes(ctx, ['project']) - ) - if (!activePromptScopes.has('project')) return results - - for (const [projectIndex, project] of promptProjects.entries()) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (project.rootMemoryPrompt != null && projectRootDir != null) { - results.push({ - path: this.resolvePath(projectRootDir, PROJECT_MEMORY_FILE), - scope: 'project', - source: {type: 'projectRootMemory', projectIndex} - }) - } - - if (project.childMemoryPrompts != null) { - for (const [ - childIndex, - child - ] of project.childMemoryPrompts.entries()) { - results.push({ - path: this.resolveFullPath(child.dir), - scope: 'project', - source: {type: 'projectChildMemory', projectIndex, childIndex} - }) - } - } - } - - return results - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - const projects = this.getProjectPromptOutputProjects(ctx) - const source = declaration.source as { - type?: string - projectIndex?: number - childIndex?: number - } - - const projectIndex = source.projectIndex ?? -1 - if (projectIndex < 0 || projectIndex >= projects.length) - { throw new Error(`Invalid project index in declaration for ${this.name}`) } - - const project = projects[projectIndex] - if (project == null) - { throw new Error(`Project not found for declaration in ${this.name}`) } - - if (source.type === 'projectRootMemory') { - if (project.rootMemoryPrompt == null) - { throw new Error( - `Root memory prompt missing for project index ${projectIndex}` - ) } - return project.rootMemoryPrompt.content as string - } - - if (source.type === 'projectChildMemory') { - const childIndex = source.childIndex ?? -1 - const child = project.childMemoryPrompts?.[childIndex] - if (child == null) - { throw new Error( - `Child memory prompt missing for project ${projectIndex}, child ${childIndex}` - ) } - return child.content as string - } - - throw new Error(`Unsupported declaration source for ${this.name}`) - } -} diff --git a/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts b/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts deleted file mode 100644 index cb4fa6f9..00000000 --- a/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts +++ /dev/null @@ -1,123 +0,0 @@ -import type { - OutputCleanContext, - OutputCleanupDeclarations, - RulePrompt -} from './plugin-core' -import {doubleQuoted} from '@truenine/md-compiler/markdown' -import {AbstractOutputPlugin} from './plugin-core' - -const PROJECT_MEMORY_FILE = 'CLAUDE.md' -const GLOBAL_CONFIG_DIR = '.claude' -const COMMANDS_SUBDIR = 'commands' -const AGENTS_SUBDIR = 'agents' -const SKILLS_SUBDIR = 'skills' - -/** - * Output plugin for Claude Code CLI. - * - * Outputs rules to `.claude/rules/` directory with frontmatter format. - * - * @see https://github.com/anthropics/claude-code/issues/26868 - * Known bug: Claude Code CLI has issues with `.claude/rules` directory handling. - * This may affect rule loading behavior in certain scenarios. - */ -export class ClaudeCodeCLIOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('ClaudeCodeCLIOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: PROJECT_MEMORY_FILE, - treatWorkspaceRootProjectAsProject: true, - toolPreset: 'claudeCode', - commands: { - subDir: COMMANDS_SUBDIR, - transformFrontMatter: (_cmd, context) => - context.sourceFrontMatter ?? {} - }, - subagents: { - subDir: AGENTS_SUBDIR, - sourceScopes: ['project'], - includePrefix: true, - linkSymbol: '-', - ext: '.md' - }, - skills: { - subDir: SKILLS_SUBDIR - }, - rules: { - transformFrontMatter: (rule: RulePrompt) => ({ - paths: rule.globs.map(doubleQuoted) - }) - }, - cleanup: { - delete: { - project: { - dirs: [ - '.claude/rules', - '.claude/commands', - '.claude/agents', - '.claude/skills' - ] - }, - global: { - files: ['.claude/CLAUDE.md'], - dirs: [ - '.claude/rules', - '.claude/commands', - '.claude/agents', - '.claude/skills' - ] - } - } - }, - wslMirrors: ['~/.claude/settings.json', '~/.claude/config.json'], - capabilities: { - prompt: { - scopes: ['project', 'global'], - singleScope: false - }, - rules: { - scopes: ['project', 'global'], - singleScope: false - }, - commands: { - scopes: ['project', 'global'], - singleScope: true - }, - subagents: { - scopes: ['project'], - singleScope: true - }, - skills: { - scopes: ['project', 'global'], - singleScope: true - } - } - }) - } - - override async declareCleanupPaths( - ctx: OutputCleanContext - ): Promise { - const declarations = await super.declareCleanupPaths(ctx) - const promptSourceProjects - = ctx.collectedOutputContext.workspace.projects.filter( - project => project.isPromptSourceProject === true - ) - const promptSourceExcludeGlobs = promptSourceProjects - .map(project => project.dirFromWorkspacePath) - .filter((dir): dir is NonNullable => dir != null) - .map(dir => this.resolvePath(dir.basePath, dir.path, '**')) - - return { - ...declarations, - delete: [ - ...declarations.delete ?? [], - ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) - ], - excludeScanGlobs: [ - ...declarations.excludeScanGlobs ?? [], - ...promptSourceExcludeGlobs - ] - } - } -} diff --git a/cli/src/plugins/CodexCLIOutputPlugin.test.ts b/cli/src/plugins/CodexCLIOutputPlugin.test.ts deleted file mode 100644 index ff516f92..00000000 --- a/cli/src/plugins/CodexCLIOutputPlugin.test.ts +++ /dev/null @@ -1,364 +0,0 @@ -import type {CommandPrompt, InputCapabilityContext, OutputCleanContext, OutputWriteContext, SubAgentPrompt} from './plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {collectDeletionTargets} from '../commands/CleanupUtils' -import {mergeConfig} from '../config' -import {CommandInputCapability} from '../inputs/input-command' -import {CodexCLIOutputPlugin} from './CodexCLIOutputPlugin' -import {createLogger, FilePathKind, PromptKind} from './plugin-core' - -class TestCodexCLIOutputPlugin extends CodexCLIOutputPlugin { - constructor(private readonly testHomeDir: string) { - super() - } - - protected override getHomeDir(): string { - return this.testHomeDir - } -} - -async function withTempCodexDirs( - prefix: string, - run: (paths: {readonly workspace: string, readonly homeDir: string}) => Promise -): Promise { - const workspace = fs.mkdtempSync(path.join(os.tmpdir(), `${prefix}-workspace-`)) - const homeDir = fs.mkdtempSync(path.join(os.tmpdir(), `${prefix}-home-`)) - - try { - await run({workspace, homeDir}) - } - finally { - fs.rmSync(workspace, {recursive: true, force: true}) - fs.rmSync(homeDir, {recursive: true, force: true}) - } -} - -function createInputContext(tempWorkspace: string): InputCapabilityContext { - return { - logger: createLogger('CodexCLIOutputPluginTest', 'error'), - fs, - path, - glob, - userConfigOptions: mergeConfig({workspaceDir: tempWorkspace}), - dependencyContext: {} - } as InputCapabilityContext -} - -function createCleanContext(): OutputCleanContext { - return { - logger: { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - }, - fs, - path, - glob, - dryRun: true, - runtimeTargets: { - jetbrainsCodexDirs: [] - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Relative, - path: '.', - basePath: '.', - getDirectoryName: () => '.', - getAbsolutePath: () => path.resolve('.') - }, - projects: [] - } - } - } as OutputCleanContext -} - -function createWriteContext( - tempWorkspace: string, - commands: readonly CommandPrompt[], - subAgents: readonly SubAgentPrompt[] = [], - pluginOptions?: OutputWriteContext['pluginOptions'] -): OutputWriteContext { - return { - logger: createLogger('CodexCLIOutputPluginTest', 'error'), - fs, - path, - glob, - dryRun: true, - ...pluginOptions != null && {pluginOptions}, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: tempWorkspace, - getDirectoryName: () => path.basename(tempWorkspace) - }, - projects: [{ - name: 'project-a', - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: tempWorkspace, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(tempWorkspace, 'project-a') - }, - isPromptSourceProject: true - }, { - name: 'project-b', - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-b', - basePath: tempWorkspace, - getDirectoryName: () => 'project-b', - getAbsolutePath: () => path.join(tempWorkspace, 'project-b') - } - }] - }, - commands, - subAgents - } - } as OutputWriteContext -} - -function createProjectCommandPrompt(): CommandPrompt { - return { - type: PromptKind.Command, - content: 'project command body', - length: 22, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'commands/dev/build.mdx', - basePath: path.resolve('tmp/dist/commands'), - getDirectoryName: () => 'dev', - getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') - }, - commandPrefix: 'dev', - commandName: 'build', - yamlFrontMatter: { - description: 'Project command', - scope: 'project' - }, - markdownContents: [] - } as CommandPrompt -} - -function createCommandPromptWithToolFields(): CommandPrompt { - return { - ...createProjectCommandPrompt(), - yamlFrontMatter: { - description: 'Tool-aware command', - scope: 'project', - allowTools: ['shell'], - allowedTools: ['shell'] - } as unknown as CommandPrompt['yamlFrontMatter'] - } as CommandPrompt -} - -function createSubAgentPrompt(scope: 'project' | 'global'): SubAgentPrompt { - return { - type: PromptKind.SubAgent, - content: 'Review changes carefully.\nFocus on concrete regressions.', - length: 55, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'subagents/qa/reviewer.mdx', - basePath: path.resolve('tmp/dist/subagents'), - getDirectoryName: () => 'qa', - getAbsolutePath: () => path.resolve('tmp/dist/subagents/qa/reviewer.mdx') - }, - agentPrefix: 'qa', - agentName: 'reviewer', - canonicalName: 'qa-reviewer', - yamlFrontMatter: { - description: 'Review pull requests', - scope, - model: 'gpt-5.2', - allowTools: ['shell'], - color: 'blue', - nickname_candidates: ['guard'], - sandbox_mode: 'workspace-write', - mcp_servers: { - docs: { - command: 'node', - args: ['mcp.js'] - } - } - } as unknown as SubAgentPrompt['yamlFrontMatter'], - markdownContents: [] - } as SubAgentPrompt -} - -describe('codexCLIOutputPlugin command output', () => { - it('renders codex commands from dist content instead of the zh source prompt', async () => { - await withTempCodexDirs('tnmsc-codex-command', async ({workspace, homeDir}) => { - const srcDir = path.join(workspace, 'aindex', 'commands', 'find') - const distDir = path.join(workspace, 'aindex', 'dist', 'commands', 'find') - - fs.mkdirSync(srcDir, {recursive: true}) - fs.mkdirSync(distDir, {recursive: true}) - - fs.writeFileSync(path.join(srcDir, 'opensource.src.mdx'), [ - 'export default {', - ' description: \'中文源描述\',', - '}', - '', - '中文源命令内容', - '' - ].join('\n'), 'utf8') - fs.writeFileSync(path.join(distDir, 'opensource.mdx'), [ - 'export default {', - ' description: \'English dist description\',', - '}', - '', - 'English dist command body', - '' - ].join('\n'), 'utf8') - - const commandInputCapability = new CommandInputCapability() - const collected = await commandInputCapability.collect(createInputContext(workspace)) - const commands = collected.commands ?? [] - - expect(commands).toHaveLength(1) - - const codexPlugin = new TestCodexCLIOutputPlugin(homeDir) - const writeCtx = createWriteContext(workspace, commands) - const declarations = await codexPlugin.declareOutputFiles(writeCtx) - const commandDeclaration = declarations.find( - declaration => declaration.path.replaceAll('\\', '/').endsWith('/.codex/prompts/find-opensource.md') - ) - - expect(commandDeclaration).toBeDefined() - if (commandDeclaration == null) throw new Error('Expected codex command declaration') - - const rendered = await codexPlugin.convertContent(commandDeclaration, writeCtx) - expect(String(rendered)).toContain('English dist description') - expect(String(rendered)).toContain('English dist command body') - expect(String(rendered)).not.toContain('中文源描述') - expect(String(rendered)).not.toContain('中文源命令内容') - }) - }) - - it('keeps project-scoped commands in the global codex directory and never mirrors them into workspace root', async () => { - await withTempCodexDirs('tnmsc-codex-project-command', async ({workspace, homeDir}) => { - const plugin = new TestCodexCLIOutputPlugin(homeDir) - const writeCtx = createWriteContext(workspace, [createProjectCommandPrompt()]) - - const declarations = await plugin.declareOutputFiles(writeCtx) - - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(homeDir, '.codex', 'prompts', 'dev-build.md') - ) - expect(declarations.map(declaration => declaration.path)).not.toContain( - path.join(workspace, '.codex', 'prompts', 'dev-build.md') - ) - expect(declarations.every(declaration => declaration.scope === 'global')).toBe(true) - }) - }) - - it('drops tool allowlist fields from codex command front matter', async () => { - await withTempCodexDirs('tnmsc-codex-command-tools', async ({workspace, homeDir}) => { - const plugin = new TestCodexCLIOutputPlugin(homeDir) - const writeCtx = createWriteContext(workspace, [createCommandPromptWithToolFields()]) - const declarations = await plugin.declareOutputFiles(writeCtx) - const declaration = declarations.find(item => item.path === path.join(homeDir, '.codex', 'prompts', 'dev-build.md')) - - expect(declaration).toBeDefined() - if (declaration == null) throw new Error('Expected codex command declaration') - - const rendered = await plugin.convertContent(declaration, writeCtx) - expect(String(rendered)).toContain('description: Tool-aware command') - expect(String(rendered)).not.toContain('allowTools') - expect(String(rendered)).not.toContain('allowedTools') - }) - }) - - it('writes project-scoped subagents into each project .codex/agents directory as toml', async () => { - await withTempCodexDirs('tnmsc-codex-project-subagent', async ({workspace, homeDir}) => { - const plugin = new TestCodexCLIOutputPlugin(homeDir) - const writeCtx = createWriteContext(workspace, [], [createSubAgentPrompt('project')]) - - const declarations = await plugin.declareOutputFiles(writeCtx) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toContain(path.join(workspace, 'project-a', '.codex', 'agents', 'qa-reviewer.toml')) - expect(paths).toContain(path.join(workspace, 'project-b', '.codex', 'agents', 'qa-reviewer.toml')) - expect(paths).not.toContain(path.join(homeDir, '.codex', 'agents', 'qa-reviewer.toml')) - - const declaration = declarations.find(item => item.path === path.join(workspace, 'project-a', '.codex', 'agents', 'qa-reviewer.toml')) - expect(declaration).toBeDefined() - if (declaration == null) throw new Error('Expected codex subagent declaration') - - const rendered = await plugin.convertContent(declaration, writeCtx) - expect(String(rendered)).toContain('name = "qa-reviewer"') - expect(String(rendered)).toContain('description = "Review pull requests"') - expect(String(rendered)).toContain([ - 'developer_instructions = """', - 'Review changes carefully.', - 'Focus on concrete regressions."""' - ].join('\n')) - expect(String(rendered)).toContain('nickname_candidates = ["guard"]') - expect(String(rendered)).toContain('sandbox_mode = "workspace-write"') - expect(String(rendered)).toContain('[mcp_servers]') - expect(String(rendered)).toContain('[mcp_servers.docs]') - expect(String(rendered)).not.toContain('model = ') - expect(String(rendered)).not.toContain('scope = ') - expect(String(rendered)).not.toContain('allowTools') - expect(String(rendered)).not.toContain('allowedTools') - expect(String(rendered)).not.toContain('color = ') - }) - }) - - it('remaps global-scoped subagents to project outputs instead of writing to the global codex directory', async () => { - await withTempCodexDirs('tnmsc-codex-global-subagent', async ({workspace, homeDir}) => { - const plugin = new TestCodexCLIOutputPlugin(homeDir) - const writeCtx = createWriteContext(workspace, [], [createSubAgentPrompt('global')]) - - const declarations = await plugin.declareOutputFiles(writeCtx) - - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(workspace, 'project-a', '.codex', 'agents', 'qa-reviewer.toml') - ) - expect(declarations.map(declaration => declaration.path)).not.toContain( - path.join(homeDir, '.codex', 'agents', 'qa-reviewer.toml') - ) - expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) - }) - }) - - it('cleans global codex skills while preserving the built-in .system directory', async () => { - await withTempCodexDirs('tnmsc-codex-cleanup-skills', async ({homeDir}) => { - const plugin = new TestCodexCLIOutputPlugin(homeDir) - const skillsDir = path.join(homeDir, '.codex', 'skills') - const preservedDir = path.join(skillsDir, '.system') - const staleDir = path.join(skillsDir, 'legacy-skill') - - fs.mkdirSync(preservedDir, {recursive: true}) - fs.mkdirSync(staleDir, {recursive: true}) - fs.writeFileSync(path.join(preservedDir, 'SKILL.md'), '# preserved', 'utf8') - fs.writeFileSync(path.join(staleDir, 'SKILL.md'), '# stale', 'utf8') - - const cleanupDeclarations = await plugin.declareCleanupPaths(createCleanContext()) - const protectPaths = cleanupDeclarations.protect?.map(target => target.path.replaceAll('\\', '/')) ?? [] - const skillCleanupTarget = cleanupDeclarations.delete?.find(target => target.kind === 'glob' && target.path.includes(`${path.sep}.codex${path.sep}skills${path.sep}`)) - const cleanupPlan = await collectDeletionTargets([plugin], createCleanContext()) - const normalizedDeleteDirs = cleanupPlan.dirsToDelete.map(target => target.replaceAll('\\', '/')) - const normalizedPreservedDir = preservedDir.replaceAll('\\', '/') - const normalizedStaleDir = staleDir.replaceAll('\\', '/') - - expect(skillCleanupTarget).toBeDefined() - expect(skillCleanupTarget?.excludeBasenames).toEqual(['.system']) - expect(protectPaths).toContain(normalizedPreservedDir) - expect(normalizedDeleteDirs).toContain(normalizedStaleDir) - expect(normalizedDeleteDirs).not.toContain(normalizedPreservedDir) - expect(cleanupPlan.violations).toEqual([]) - }) - }) -}) diff --git a/cli/src/plugins/CodexCLIOutputPlugin.ts b/cli/src/plugins/CodexCLIOutputPlugin.ts deleted file mode 100644 index e11d54f9..00000000 --- a/cli/src/plugins/CodexCLIOutputPlugin.ts +++ /dev/null @@ -1,124 +0,0 @@ -import type {AbstractOutputPluginOptions, OutputCleanContext, OutputCleanupDeclarations} from './plugin-core' -import {AbstractOutputPlugin, PLUGIN_NAMES, resolveSubAgentCanonicalName} from './plugin-core' - -const PROJECT_MEMORY_FILE = 'AGENTS.md' -const GLOBAL_CONFIG_DIR = '.codex' -const PROMPTS_SUBDIR = 'prompts' -const AGENTS_SUBDIR = 'agents' -const SKILLS_SUBDIR = 'skills' -const PRESERVED_SYSTEM_SKILL_DIR = '.system' -const CODEX_SUBAGENT_FIELD_ORDER = ['name', 'description', 'developer_instructions'] as const -const CODEX_EXCLUDED_SUBAGENT_FIELDS = ['scope', 'seriName', 'argumentHint', 'color', 'namingCase', 'model'] as const - -function sanitizeCodexFrontMatter( - sourceFrontMatter?: Record -): Record { - const frontMatter = {...sourceFrontMatter} - - // Codex front matter rejects tool allowlists. Keep accepting upstream metadata - // for other outputs, but drop both common spellings here for Codex compatibility. - delete frontMatter['allowTools'] - delete frontMatter['allowedTools'] - return frontMatter -} - -function transformCodexSubAgentFrontMatter( - subAgentCanonicalName: string, - sourceFrontMatter?: Record -): Record { - const frontMatter = sanitizeCodexFrontMatter(sourceFrontMatter) - frontMatter['name'] = subAgentCanonicalName - return frontMatter -} - -const CODEX_OUTPUT_OPTIONS = { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: PROJECT_MEMORY_FILE, - commands: { - subDir: PROMPTS_SUBDIR, - scopeRemap: { - project: 'global' - }, - transformFrontMatter: (_cmd, context) => sanitizeCodexFrontMatter(context.sourceFrontMatter) - }, - subagents: { - subDir: AGENTS_SUBDIR, - sourceScopes: ['project'], - scopeRemap: { - global: 'project' - }, - ext: '.toml', - artifactFormat: 'toml', - bodyFieldName: 'developer_instructions', - excludedFrontMatterFields: CODEX_EXCLUDED_SUBAGENT_FIELDS, - transformFrontMatter: (subAgent, context) => transformCodexSubAgentFrontMatter(resolveSubAgentCanonicalName(subAgent), context.sourceFrontMatter), - fieldOrder: CODEX_SUBAGENT_FIELD_ORDER - }, - cleanup: { - delete: { - project: { - dirs: ['.codex/agents'] - }, - global: { - files: ['.codex/AGENTS.md'], - dirs: ['.codex/prompts'], - globs: ['.codex/skills/*'] - } - }, - protect: { - global: { - dirs: [`.codex/${SKILLS_SUBDIR}/${PRESERVED_SYSTEM_SKILL_DIR}`] - } - } - }, - wslMirrors: [ - '~/.codex/config.toml', - '~/.codex/auth.json' - ], - dependsOn: [PLUGIN_NAMES.AgentsOutput], - capabilities: { - prompt: { - scopes: ['global'], - singleScope: false - }, - commands: { - scopes: ['global'], - singleScope: true - }, - subagents: { - scopes: ['project'], - singleScope: true - } - } -} satisfies AbstractOutputPluginOptions - -export class CodexCLIOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('CodexCLIOutputPlugin', CODEX_OUTPUT_OPTIONS) - } - - /** - * Project-scoped output still writes to the workspace project, but Codex also - * resolves user-installed skills from `~/.codex/skills/`. Cleanup therefore - * needs to prune that global skills directory as well, while preserving the - * built-in `.system/` subtree. - */ - override async declareCleanupPaths(ctx: OutputCleanContext): Promise { - const declarations = await super.declareCleanupPaths(ctx) - - return { - ...declarations, - delete: (declarations.delete ?? []).map(target => { - if (target.kind !== 'glob') return target - - const normalizedPath = target.path.replaceAll('\\', '/') - if (!normalizedPath.endsWith(`/.codex/${SKILLS_SUBDIR}/*`)) return target - - return { - ...target, - excludeBasenames: [PRESERVED_SYSTEM_SKILL_DIR] - } - }) - } - } -} diff --git a/cli/src/plugins/CursorOutputPlugin.test.ts b/cli/src/plugins/CursorOutputPlugin.test.ts deleted file mode 100644 index 17fab4a4..00000000 --- a/cli/src/plugins/CursorOutputPlugin.test.ts +++ /dev/null @@ -1,351 +0,0 @@ -import type {CommandPrompt, GlobalMemoryPrompt, OutputCleanContext, OutputWriteContext, RulePrompt, SkillPrompt} from './plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {collectDeletionTargets} from '@/commands/CleanupUtils' -import {CursorOutputPlugin} from './CursorOutputPlugin' -import {createLogger, FilePathKind, PromptKind} from './plugin-core' - -class TestCursorOutputPlugin extends CursorOutputPlugin { - constructor(private readonly testHomeDir: string) { - super() - } - - protected override getHomeDir(): string { - return this.testHomeDir - } -} - -function createCleanContext(): OutputCleanContext { - return { - logger: { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - }, - fs, - path, - glob, - dryRun: true, - runtimeTargets: { - jetbrainsCodexDirs: [] - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Relative, - path: '.', - basePath: '.', - getDirectoryName: () => '.', - getAbsolutePath: () => path.resolve('.') - }, - projects: [] - } - } - } as OutputCleanContext -} - -function createCommandPrompt(): CommandPrompt { - return { - type: PromptKind.Command, - content: 'command body', - length: 12, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'commands/dev/build.mdx', - basePath: path.resolve('tmp/dist/commands'), - getDirectoryName: () => 'dev', - getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') - }, - commandPrefix: 'dev', - commandName: 'build', - yamlFrontMatter: { - description: 'Build', - scope: 'project' - }, - markdownContents: [] - } as CommandPrompt -} - -function createGlobalMemoryPrompt(): GlobalMemoryPrompt { - return { - type: PromptKind.GlobalMemory, - content: 'global prompt', - length: 13, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'aindex/dist/global.mdx', - basePath: path.resolve('.'), - getDirectoryName: () => 'dist', - getAbsolutePath: () => path.resolve('aindex/dist/global.mdx') - }, - markdownContents: [] - } as GlobalMemoryPrompt -} - -function createSkillPrompt( - scope: 'project' | 'global' = 'project', - name: string = 'ship-it' -): SkillPrompt { - return { - type: PromptKind.Skill, - content: 'skill body', - length: 10, - filePathKind: FilePathKind.Relative, - skillName: name, - dir: { - pathKind: FilePathKind.Relative, - path: `skills/${name}`, - basePath: path.resolve('tmp/dist/skills'), - getDirectoryName: () => name, - getAbsolutePath: () => path.resolve('tmp/dist/skills', name) - }, - yamlFrontMatter: { - description: 'Ship release', - scope - }, - mcpConfig: { - type: PromptKind.SkillMcpConfig, - mcpServers: { - inspector: { - command: 'npx', - args: ['inspector'] - } - }, - rawContent: '{"mcpServers":{"inspector":{"command":"npx","args":["inspector"]}}}' - }, - markdownContents: [] - } as SkillPrompt -} - -function createRulePrompt(): RulePrompt { - return { - type: PromptKind.Rule, - content: 'rule body', - length: 9, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'rules/ops/guard.mdx', - basePath: path.resolve('tmp/dist/rules'), - getDirectoryName: () => 'ops', - getAbsolutePath: () => path.resolve('tmp/dist/rules/ops/guard.mdx') - }, - prefix: 'ops', - ruleName: 'guard', - globs: ['src/**'], - scope: 'project', - markdownContents: [] - } as RulePrompt -} - -describe('cursorOutputPlugin cleanup', () => { - it('declares cleanup exclusions for built-in skills and lets core cleanup skip them', async () => { - const tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cursor-cleanup-')) - const skillsDir = path.join(tempHomeDir, '.cursor', 'skills-cursor') - const preservedDir = path.join(skillsDir, 'create-rule') - const staleDir = path.join(skillsDir, 'legacy-skill') - - fs.mkdirSync(preservedDir, {recursive: true}) - fs.mkdirSync(staleDir, {recursive: true}) - fs.writeFileSync(path.join(preservedDir, 'SKILL.md'), '# preserved', 'utf8') - fs.writeFileSync(path.join(staleDir, 'SKILL.md'), '# stale', 'utf8') - - try { - const plugin = new TestCursorOutputPlugin(tempHomeDir) - const result = await plugin.declareCleanupPaths(createCleanContext()) - const protectPaths = result.protect?.map(target => target.path.replaceAll('\\', '/')) ?? [] - const normalizedCommandsDir = path.join(tempHomeDir, '.cursor', 'commands').replaceAll('\\', '/') - const normalizedStaleDir = staleDir.replaceAll('\\', '/') - const normalizedPreservedDir = preservedDir.replaceAll('\\', '/') - const skillCleanupTarget = result.delete?.find(target => target.kind === 'glob' && target.path.includes('skills')) - const cleanupPlan = await collectDeletionTargets([plugin], createCleanContext()) - const normalizedDeleteDirs = cleanupPlan.dirsToDelete.map(target => target.replaceAll('\\', '/')) - - expect(result.delete?.map(target => target.path.replaceAll('\\', '/')) ?? []).toContain(normalizedCommandsDir) - expect(skillCleanupTarget?.excludeBasenames).toEqual(expect.arrayContaining(['create-rule'])) - expect(normalizedDeleteDirs).toContain(normalizedStaleDir) - expect(normalizedDeleteDirs).not.toContain(normalizedPreservedDir) - expect(protectPaths).toContain(normalizedPreservedDir) - } - finally { - fs.rmSync(tempHomeDir, {recursive: true, force: true}) - } - }) - - it('writes project-scoped commands, skills, mcp, and rules into workspace root through the synthetic workspace project', async () => { - const workspaceBase = path.resolve('tmp/cursor-workspace') - const plugin = new TestCursorOutputPlugin(path.join(workspaceBase, 'home')) - const ctx = { - logger: createLogger('CursorOutputPlugin', 'error'), - fs, - path, - glob, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true - }] - }, - commands: [createCommandPrompt()], - skills: [createSkillPrompt()], - rules: [createRulePrompt()] - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toContain(path.join(workspaceBase, '.cursor', 'commands', 'dev-build.md')) - expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills', 'ship-it', 'SKILL.md')) - expect(paths).toContain(path.join(workspaceBase, '.cursor', 'mcp.json')) - expect(paths).toContain(path.join(workspaceBase, '.cursor', 'rules', 'rule-ops-guard.md')) - expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) - }) - - it('keeps skill files global when only mcp is project-scoped', async () => { - const workspaceBase = path.resolve('tmp/cursor-split-scope-project-mcp') - const homeDir = path.join(workspaceBase, 'home') - const plugin = new TestCursorOutputPlugin(homeDir) - const ctx = { - logger: createLogger('CursorOutputPlugin', 'error'), - fs, - path, - glob, - dryRun: true, - pluginOptions: { - outputScopes: { - plugins: { - CursorOutputPlugin: { - skills: 'global', - mcp: 'project' - } - } - } - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true - }] - }, - skills: [ - createSkillPrompt('project', 'inspect-locally'), - createSkillPrompt('global', 'ship-it') - ] - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'ship-it', 'SKILL.md')) - expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills', 'inspect-locally', 'mcp.json')) - expect(paths).toContain(path.join(workspaceBase, '.cursor', 'mcp.json')) - expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'skills', 'ship-it', 'SKILL.md')) - expect(paths).not.toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'inspect-locally', 'SKILL.md')) - expect(paths).not.toContain(path.join(homeDir, '.cursor', 'mcp.json')) - }) - - it('keeps skill files project-scoped when only mcp is global-scoped', async () => { - const workspaceBase = path.resolve('tmp/cursor-split-scope-global-mcp') - const homeDir = path.join(workspaceBase, 'home') - const plugin = new TestCursorOutputPlugin(homeDir) - const ctx = { - logger: createLogger('CursorOutputPlugin', 'error'), - fs, - path, - glob, - dryRun: true, - pluginOptions: { - outputScopes: { - plugins: { - CursorOutputPlugin: { - skills: 'project', - mcp: 'global' - } - } - } - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true - }] - }, - skills: [ - createSkillPrompt('project', 'ship-it'), - createSkillPrompt('global', 'inspect-globally') - ] - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills', 'ship-it', 'SKILL.md')) - expect(paths).toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'inspect-globally', 'mcp.json')) - expect(paths).toContain(path.join(homeDir, '.cursor', 'mcp.json')) - expect(paths).not.toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'ship-it', 'SKILL.md')) - expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'skills', 'inspect-globally', 'SKILL.md')) - expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'mcp.json')) - }) - - it('writes the global prompt to workspace root through the synthetic workspace project', async () => { - const workspaceBase = path.resolve('tmp/cursor-workspace-global-prompt') - const plugin = new TestCursorOutputPlugin(path.join(workspaceBase, 'home')) - const ctx = { - logger: createLogger('CursorOutputPlugin', 'error'), - fs, - path, - glob, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true - }] - }, - globalMemory: createGlobalMemoryPrompt() - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(workspaceBase, '.cursor', 'rules', 'global.mdc') - ) - }) -}) diff --git a/cli/src/plugins/CursorOutputPlugin.ts b/cli/src/plugins/CursorOutputPlugin.ts deleted file mode 100644 index 4fc08530..00000000 --- a/cli/src/plugins/CursorOutputPlugin.ts +++ /dev/null @@ -1,561 +0,0 @@ -import type { - CommandPrompt, - OutputCleanContext, - OutputCleanupDeclarations, - OutputFileDeclaration, - OutputWriteContext, - RulePrompt, - SkillPrompt -} from './plugin-core' -import {Buffer} from 'node:buffer' -import * as path from 'node:path' -import { - AbstractOutputPlugin, - applySubSeriesGlobPrefix, - collectMcpServersFromSkills, - filterByProjectConfig, - GlobalConfigDirs, - IgnoreFiles, - OutputFileNames, - OutputSubdirectories, - PLUGIN_NAMES, - PreservedSkills, - transformMcpConfigForCursor, - transformMcpServerMap -} from './plugin-core' - -const GLOBAL_CONFIG_DIR = GlobalConfigDirs.CURSOR -const MCP_CONFIG_FILE = OutputFileNames.MCP_CONFIG -const COMMANDS_SUBDIR = OutputSubdirectories.COMMANDS -const RULES_SUBDIR = OutputSubdirectories.RULES -const GLOBAL_RULE_FILE = OutputFileNames.CURSOR_GLOBAL_RULE -const SKILLS_CURSOR_SUBDIR = OutputSubdirectories.CURSOR_SKILLS -const SKILLS_PROJECT_SUBDIR = 'skills' -const SKILL_FILE_NAME = OutputFileNames.SKILL -const PRESERVED_SKILLS = PreservedSkills.CURSOR - -type CursorOutputSource - = | {readonly kind: 'command', readonly command: CommandPrompt} - | { - readonly kind: 'mcpConfig' - readonly mcpServers: Record> - } - | {readonly kind: 'skill', readonly skill: SkillPrompt} - | {readonly kind: 'skillMcpConfig', readonly rawContent: string} - | {readonly kind: 'skillChildDoc', readonly content: string} - | { - readonly kind: 'skillResource' - readonly content: string - readonly encoding: 'text' | 'base64' - } - | {readonly kind: 'globalRuleContent', readonly content: string} - | {readonly kind: 'ruleMdc', readonly rule: RulePrompt} - | {readonly kind: 'ignoreFile', readonly content: string} - -export class CursorOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('CursorOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: '', - treatWorkspaceRootProjectAsProject: true, - dependsOn: [PLUGIN_NAMES.AgentsOutput], - indexignore: IgnoreFiles.CURSOR, - commands: { - subDir: COMMANDS_SUBDIR, - transformFrontMatter: (_cmd, context) => - context.sourceFrontMatter ?? {} - }, - skills: { - subDir: SKILLS_CURSOR_SUBDIR - }, - rules: { - subDir: RULES_SUBDIR, - prefix: 'rule', - sourceScopes: ['project', 'global'] - }, - cleanup: { - delete: { - project: { - files: ['.cursor/mcp.json'], - dirs: ['.cursor/commands', '.cursor/rules'], - globs: ['.cursor/skills/*', '.cursor/skills-cursor/*'] - }, - global: { - files: ['.cursor/mcp.json'], - dirs: ['.cursor/commands', '.cursor/rules'], - globs: ['.cursor/skills-cursor/*'] - } - }, - protect: { - global: { - dirs: Array.from( - PRESERVED_SKILLS, - skillName => `.cursor/skills-cursor/${skillName}` - ) - } - }, - excludeScanGlobs: Array.from( - PRESERVED_SKILLS, - skillName => `.cursor/skills-cursor/${skillName}/**` - ) - }, - capabilities: { - prompt: { - scopes: ['global'], - singleScope: false - }, - rules: { - scopes: ['project', 'global'], - singleScope: false - }, - commands: { - scopes: ['project', 'global'], - singleScope: true - }, - skills: { - scopes: ['project', 'global'], - singleScope: true - }, - mcp: { - scopes: ['project', 'global'], - singleScope: true - } - } - }) - } - - override async declareCleanupPaths( - ctx: OutputCleanContext - ): Promise { - const declarations = await super.declareCleanupPaths(ctx) - return { - ...declarations, - delete: (declarations.delete ?? []).map(target => { - if (target.kind !== 'glob') return target - - const normalizedPath = target.path.replaceAll('\\', '/') - if (!normalizedPath.endsWith(`/.cursor/${SKILLS_CURSOR_SUBDIR}/*`)) - { return target } - - return { - ...target, - excludeBasenames: [...PRESERVED_SKILLS] - } - }) - } - } - - override async declareOutputFiles( - ctx: OutputWriteContext - ): Promise { - const declarations: OutputFileDeclaration[] = [] - const {globalMemory, commands, skills, rules, aiAgentIgnoreConfigFiles} - = ctx.collectedOutputContext - const globalDir = this.getGlobalConfigDir() - const promptSourceProjectConfig - = this.resolvePromptSourceProjectConfig(ctx) - const concreteProjects = this.getConcreteProjects(ctx) - const promptProjects = this.getProjectPromptOutputProjects(ctx) - const transformOptions = this.getTransformOptionsFromContext(ctx, { - includeSeriesPrefix: true - }) - const activePromptScopes = new Set( - this.selectPromptScopes(ctx, ['global']) - ) - const activeRuleScopes = new Set( - rules != null ? this.selectRuleScopes(ctx, rules) : [] - ) - const selectedSkills - = skills != null - ? this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'skills') - ) - : {items: [] as readonly SkillPrompt[]} - const selectedMcpSkills - = skills != null - ? this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'mcp') - ?? this.getTopicScopeOverride(ctx, 'skills') - ) - : {items: [] as readonly SkillPrompt[]} - const selectedCommands - = commands != null - ? this.selectSingleScopeItems( - commands, - this.commandsConfig.sourceScopes, - command => this.resolveCommandSourceScope(command), - this.getTopicScopeOverride(ctx, 'commands') - ) - : {items: [] as readonly CommandPrompt[]} - - const pushSkillDeclarations = ( - baseDir: string, - scope: 'project' | 'global', - filteredSkills: readonly SkillPrompt[] - ): void => { - const skillsSubDir - = scope === 'global' ? SKILLS_CURSOR_SUBDIR : SKILLS_PROJECT_SUBDIR - for (const skill of filteredSkills) { - const skillName = this.getSkillName(skill) - if (this.isPreservedSkill(skillName)) continue - - const skillDir = path.join(baseDir, skillsSubDir, skillName) - declarations.push({ - path: path.join(skillDir, SKILL_FILE_NAME), - scope, - source: {kind: 'skill', skill} satisfies CursorOutputSource - }) - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - declarations.push({ - path: path.join( - skillDir, - childDoc.relativePath.replace(/\.mdx$/, '.md') - ), - scope, - source: { - kind: 'skillChildDoc', - content: childDoc.content as string - } satisfies CursorOutputSource - }) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - declarations.push({ - path: path.join(skillDir, resource.relativePath), - scope, - source: { - kind: 'skillResource', - content: resource.content, - encoding: resource.encoding - } satisfies CursorOutputSource - }) - } - } - } - } - - const pushSkillMcpDeclarations = ( - baseDir: string, - scope: 'project' | 'global', - filteredMcpSkills: readonly SkillPrompt[] - ): void => { - const skillsSubDir - = scope === 'global' ? SKILLS_CURSOR_SUBDIR : SKILLS_PROJECT_SUBDIR - for (const skill of filteredMcpSkills) { - if (skill.mcpConfig == null) continue - - const skillDir = path.join( - baseDir, - skillsSubDir, - this.getSkillName(skill) - ) - declarations.push({ - path: path.join(skillDir, MCP_CONFIG_FILE), - scope, - source: { - kind: 'skillMcpConfig', - rawContent: skill.mcpConfig.rawContent - } satisfies CursorOutputSource - }) - } - } - - const pushMcpDeclaration = ( - baseDir: string, - scope: 'project' | 'global', - filteredSkills: readonly SkillPrompt[] - ): void => { - if (filteredSkills.length === 0) return - - const servers = collectMcpServersFromSkills(filteredSkills, this.log) - if (servers.size === 0) return - - declarations.push({ - path: path.join(baseDir, MCP_CONFIG_FILE), - scope, - source: { - kind: 'mcpConfig', - mcpServers: transformMcpServerMap( - servers, - transformMcpConfigForCursor - ) - } satisfies CursorOutputSource - }) - } - - if ( - selectedSkills.selectedScope === 'project' - || selectedMcpSkills.selectedScope === 'project' - ) { - for (const project of this.getProjectOutputProjects(ctx)) { - const baseDir = this.resolveProjectConfigDir(ctx, project) - if (baseDir == null) continue - - if (selectedSkills.selectedScope === 'project') { - const filteredSkills = filterByProjectConfig( - selectedSkills.items, - project.projectConfig, - 'skills' - ) - pushSkillDeclarations(baseDir, 'project', filteredSkills) - } - - if (selectedMcpSkills.selectedScope === 'project') { - const filteredMcpSkills = filterByProjectConfig( - selectedMcpSkills.items, - project.projectConfig, - 'skills' - ) - pushSkillMcpDeclarations(baseDir, 'project', filteredMcpSkills) - pushMcpDeclaration(baseDir, 'project', filteredMcpSkills) - } - } - } - - if ( - selectedSkills.selectedScope === 'global' - || selectedMcpSkills.selectedScope === 'global' - ) { - if (selectedSkills.selectedScope === 'global') { - const filteredSkills = filterByProjectConfig( - selectedSkills.items, - promptSourceProjectConfig, - 'skills' - ) - pushSkillDeclarations(globalDir, 'global', filteredSkills) - } - - if (selectedMcpSkills.selectedScope === 'global') { - const filteredMcpSkills = filterByProjectConfig( - selectedMcpSkills.items, - promptSourceProjectConfig, - 'skills' - ) - pushSkillMcpDeclarations(globalDir, 'global', filteredMcpSkills) - pushMcpDeclaration(globalDir, 'global', filteredMcpSkills) - } - } - - if (selectedCommands.selectedScope === 'project') { - for (const project of this.getProjectOutputProjects(ctx)) { - const baseDir = this.resolveProjectConfigDir(ctx, project) - if (baseDir == null) continue - - const filteredCommands = filterByProjectConfig( - selectedCommands.items, - project.projectConfig, - 'commands' - ) - for (const command of filteredCommands) { - declarations.push({ - path: path.join( - baseDir, - COMMANDS_SUBDIR, - this.transformCommandName(command, transformOptions) - ), - scope: 'project', - source: {kind: 'command', command} satisfies CursorOutputSource - }) - } - } - } - - if (selectedCommands.selectedScope === 'global') { - const filteredCommands = filterByProjectConfig( - selectedCommands.items, - promptSourceProjectConfig, - 'commands' - ) - for (const command of filteredCommands) { - declarations.push({ - path: path.join( - globalDir, - COMMANDS_SUBDIR, - this.transformCommandName(command, transformOptions) - ), - scope: 'global', - source: {kind: 'command', command} satisfies CursorOutputSource - }) - } - } - - if (rules != null && rules.length > 0) { - const globalRules = rules.filter( - rule => - this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global' - ) - if (activeRuleScopes.has('global')) { - for (const rule of globalRules) { - declarations.push({ - path: path.join( - globalDir, - RULES_SUBDIR, - this.buildRuleFileName(rule) - ), - scope: 'global', - source: {kind: 'ruleMdc', rule} satisfies CursorOutputSource - }) - } - } - - if (activeRuleScopes.has('project')) { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectBaseDir = this.resolveProjectConfigDir(ctx, project) - if (projectBaseDir == null) continue - const projectRules = applySubSeriesGlobPrefix( - filterByProjectConfig( - rules.filter( - rule => - this.normalizeSourceScope(this.normalizeRuleScope(rule)) - === 'project' - ), - project.projectConfig, - 'rules' - ), - project.projectConfig - ) - for (const rule of projectRules) { - declarations.push({ - path: path.join( - projectBaseDir, - RULES_SUBDIR, - this.buildRuleFileName(rule) - ), - scope: 'project', - source: {kind: 'ruleMdc', rule} satisfies CursorOutputSource - }) - } - } - } - } - - if (globalMemory != null && activePromptScopes.has('global')) { - const globalRuleContent = this.buildGlobalRuleContent( - globalMemory.content as string, - ctx - ) - for (const project of promptProjects) { - const projectBaseDir = this.resolveProjectConfigDir(ctx, project) - if (projectBaseDir == null) continue - declarations.push({ - path: path.join(projectBaseDir, RULES_SUBDIR, GLOBAL_RULE_FILE), - scope: 'project', - source: { - kind: 'globalRuleContent', - content: globalRuleContent - } satisfies CursorOutputSource - }) - } - } - - const ignoreOutputPath = this.getIgnoreOutputPath() - const ignoreFile - = this.indexignore == null - ? void 0 - : aiAgentIgnoreConfigFiles?.find( - file => file.fileName === this.indexignore - ) - if (ignoreOutputPath != null && ignoreFile != null) { - for (const project of concreteProjects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null || project.isPromptSourceProject === true) - { continue } - declarations.push({ - path: path.join( - projectDir.basePath, - projectDir.path, - ignoreOutputPath - ), - scope: 'project', - source: { - kind: 'ignoreFile', - content: ignoreFile.content - } satisfies CursorOutputSource - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - const source = declaration.source as CursorOutputSource - switch (source.kind) { - case 'command': - return this.buildCommandContent(source.command, ctx) - case 'mcpConfig': - return JSON.stringify({mcpServers: source.mcpServers}, null, 2) - case 'skill': { - const frontMatterData = this.buildSkillFrontMatter(source.skill) - return this.buildMarkdownContent( - source.skill.content as string, - frontMatterData, - ctx - ) - } - case 'skillMcpConfig': - return source.rawContent - case 'skillChildDoc': - case 'globalRuleContent': - case 'ignoreFile': - return source.content - case 'skillResource': - return source.encoding === 'base64' - ? Buffer.from(source.content, 'base64') - : source.content - case 'ruleMdc': - return this.buildRuleMdcContent(source.rule, ctx) - default: - throw new Error(`Unsupported declaration source for ${this.name}`) - } - } - - private buildGlobalRuleContent( - content: string, - ctx: OutputWriteContext - ): string { - return this.buildMarkdownContent( - content, - {description: 'Global prompt (synced)', alwaysApply: true}, - ctx - ) - } - - private isPreservedSkill(name: string): boolean { - return PRESERVED_SKILLS.has(name) - } - - protected buildRuleMdcContent( - rule: RulePrompt, - ctx?: OutputWriteContext - ): string { - const fmData: Record = { - alwaysApply: false, - globs: rule.globs.length > 0 ? rule.globs.join(', ') : '' - } - const raw = this.buildMarkdownContent(rule.content, fmData, ctx) - const lines = raw.split('\n') - const transformedLines = lines.map(line => { - const match = /^(\s*globs:\s*)(['"])(.*)\2\s*$/.exec(line) - if (match == null) return line - const prefix = match[1] ?? 'globs: ' - const value = match[3] ?? '' - if (value.trim().length === 0) return line - return `${prefix}${value}` - }) - return transformedLines.join('\n') - } -} diff --git a/cli/src/plugins/DroidCLIOutputPlugin.ts b/cli/src/plugins/DroidCLIOutputPlugin.ts deleted file mode 100644 index 5845cfc4..00000000 --- a/cli/src/plugins/DroidCLIOutputPlugin.ts +++ /dev/null @@ -1,56 +0,0 @@ -import type { - OutputWriteContext, - SkillPrompt -} from './plugin-core' -import {AbstractOutputPlugin} from './plugin-core' - -const GLOBAL_MEMORY_FILE = 'AGENTS.md' -const GLOBAL_CONFIG_DIR = '.factory' - -export class DroidCLIOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('DroidCLIOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: GLOBAL_MEMORY_FILE, - treatWorkspaceRootProjectAsProject: true, - commands: { - transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} - }, - skills: {}, - cleanup: { - delete: { - project: { - files: [GLOBAL_MEMORY_FILE], - dirs: ['.factory/commands', '.factory/skills'] - }, - global: { - files: ['.factory/AGENTS.md'], - dirs: ['.factory/commands', '.factory/skills'] - } - } - }, - capabilities: { - prompt: { - scopes: ['project', 'global'], - singleScope: false - }, - commands: { - scopes: ['project', 'global'], - singleScope: true - }, - skills: { - scopes: ['project', 'global'], - singleScope: true - } - } - }) // Droid uses default subdir names - } - - protected override buildSkillMainContent(skill: SkillPrompt, ctx?: OutputWriteContext): string { // Droid-specific: Simplify front matter - const simplifiedFrontMatter = skill.yamlFrontMatter != null // Droid-specific: Simplify front matter - ? {name: this.getSkillName(skill), description: skill.yamlFrontMatter.description} - : void 0 - - return this.buildMarkdownContent(skill.content as string, simplifiedFrontMatter, ctx) - } -} diff --git a/cli/src/plugins/EditorConfigOutputPlugin.ts b/cli/src/plugins/EditorConfigOutputPlugin.ts deleted file mode 100644 index 88038b60..00000000 --- a/cli/src/plugins/EditorConfigOutputPlugin.ts +++ /dev/null @@ -1,59 +0,0 @@ -import type { - OutputFileDeclaration, - OutputWriteContext -} from './plugin-core' -import {AbstractOutputPlugin} from './plugin-core' - -const EDITOR_CONFIG_FILE = '.editorconfig' - -/** - * Output plugin for writing .editorconfig files to project directories. - * Reads EditorConfig files collected by EditorConfigInputCapability. - */ -export class EditorConfigOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('EditorConfigOutputPlugin', { - cleanup: { - delete: { - project: { - files: [EDITOR_CONFIG_FILE] - } - } - }, - capabilities: {} - }) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {projects} = ctx.collectedOutputContext.workspace - const {editorConfigFiles} = ctx.collectedOutputContext - - if (editorConfigFiles == null || editorConfigFiles.length === 0) return declarations - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - for (const config of editorConfigFiles) { - declarations.push({ - path: this.resolvePath(projectDir.basePath, projectDir.path, EDITOR_CONFIG_FILE), - scope: 'project', - source: {content: config.content} - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - void ctx - const source = declaration.source as {content?: string} - if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) - return source.content - } -} diff --git a/cli/src/plugins/GeminiCLIOutputPlugin.ts b/cli/src/plugins/GeminiCLIOutputPlugin.ts deleted file mode 100644 index 9c4b9bb2..00000000 --- a/cli/src/plugins/GeminiCLIOutputPlugin.ts +++ /dev/null @@ -1,57 +0,0 @@ -import type { - OutputCleanContext, - OutputCleanupDeclarations -} from './plugin-core' -import {AbstractOutputPlugin} from './plugin-core' - -const PROJECT_MEMORY_FILE = 'GEMINI.md' -const GLOBAL_CONFIG_DIR = '.gemini' - -export class GeminiCLIOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('GeminiCLIOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: PROJECT_MEMORY_FILE, - treatWorkspaceRootProjectAsProject: true, - cleanup: { - delete: { - global: { - files: ['.gemini/GEMINI.md'] - } - } - }, - capabilities: { - prompt: { - scopes: ['project', 'global'], - singleScope: false - } - } - }) - } - - override async declareCleanupPaths( - ctx: OutputCleanContext - ): Promise { - const declarations = await super.declareCleanupPaths(ctx) - const promptSourceProjects - = ctx.collectedOutputContext.workspace.projects.filter( - project => project.isPromptSourceProject === true - ) - const promptSourceExcludeGlobs = promptSourceProjects - .map(project => project.dirFromWorkspacePath) - .filter((dir): dir is NonNullable => dir != null) - .map(dir => this.resolvePath(dir.basePath, dir.path, '**')) - - return { - ...declarations, - delete: [ - ...declarations.delete ?? [], - ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) - ], - excludeScanGlobs: [ - ...declarations.excludeScanGlobs ?? [], - ...promptSourceExcludeGlobs - ] - } - } -} diff --git a/cli/src/plugins/GenericSkillsOutputPlugin.test.ts b/cli/src/plugins/GenericSkillsOutputPlugin.test.ts deleted file mode 100644 index 092e54f5..00000000 --- a/cli/src/plugins/GenericSkillsOutputPlugin.test.ts +++ /dev/null @@ -1,192 +0,0 @@ -import type {OutputCleanContext, OutputWriteContext, SkillPrompt} from './plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {GenericSkillsOutputPlugin} from './GenericSkillsOutputPlugin' -import {createLogger, FilePathKind, PromptKind} from './plugin-core' - -class TestGenericSkillsOutputPlugin extends GenericSkillsOutputPlugin { - constructor(private readonly testHomeDir: string) { - super() - } - - protected override getHomeDir(): string { - return this.testHomeDir - } -} - -function createSkillPrompt(scope: 'project' | 'global', name: string): SkillPrompt { - return { - type: PromptKind.Skill, - content: 'skill body', - length: 10, - filePathKind: FilePathKind.Relative, - skillName: name, - dir: { - pathKind: FilePathKind.Relative, - path: `skills/${name}`, - basePath: path.resolve('tmp/dist/skills'), - getDirectoryName: () => name, - getAbsolutePath: () => path.resolve('tmp/dist/skills', name) - }, - yamlFrontMatter: { - description: 'Skill description', - scope - }, - mcpConfig: { - type: PromptKind.SkillMcpConfig, - mcpServers: { - inspector: { - command: 'npx', - args: ['inspector'] - } - }, - rawContent: '{"mcpServers":{"inspector":{"command":"npx","args":["inspector"]}}}' - }, - markdownContents: [] - } as SkillPrompt -} - -function createContext( - workspaceBase: string, - pluginOptions?: OutputWriteContext['pluginOptions'], - skills: readonly SkillPrompt[] = [createSkillPrompt('project', 'ship-it')] -): OutputWriteContext { - return { - logger: createLogger('GenericSkillsOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - pluginOptions, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true - }] - }, - skills: [...skills] - } - } as OutputWriteContext -} - -function createCleanContext(): OutputCleanContext { - return { - logger: createLogger('GenericSkillsOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - runtimeTargets: { - jetbrainsCodexDirs: [] - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Relative, - path: '.', - basePath: '.', - getDirectoryName: () => '.', - getAbsolutePath: () => path.resolve('.') - }, - projects: [] - } - } - } as OutputCleanContext -} - -describe('genericSkillsOutputPlugin synthetic workspace project output', () => { - it('writes project-scoped skills into workspace root .agents/skills via the synthetic workspace project', async () => { - const workspaceBase = path.resolve('tmp/generic-skills-workspace') - const plugin = new TestGenericSkillsOutputPlugin(path.resolve('tmp/generic-skills-home')) - const ctx = createContext(workspaceBase) - - const declarations = await plugin.declareOutputFiles(ctx) - - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(workspaceBase, '.agents', 'skills', 'ship-it', 'SKILL.md') - ) - expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) - }) - - it('writes global mcp.json even when skill files stay project-scoped', async () => { - const workspaceBase = path.resolve('tmp/generic-skills-workspace') - const homeDir = path.resolve('tmp/generic-skills-home') - const plugin = new TestGenericSkillsOutputPlugin(homeDir) - const skills = [ - createSkillPrompt('project', 'ship-it'), - createSkillPrompt('global', 'inspect-globally') - ] - const ctx = createContext(workspaceBase, { - outputScopes: { - plugins: { - GenericSkillsOutputPlugin: { - skills: 'project', - mcp: 'global' - } - } - } - }, skills) - - const declarations = await plugin.declareOutputFiles(ctx) - - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(workspaceBase, '.agents', 'skills', 'ship-it', 'SKILL.md') - ) - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(homeDir, '.agents', 'skills', 'inspect-globally', 'mcp.json') - ) - }) - - it('writes project mcp.json even when skill files stay global-scoped', async () => { - const workspaceBase = path.resolve('tmp/generic-skills-workspace') - const homeDir = path.resolve('tmp/generic-skills-home') - const plugin = new TestGenericSkillsOutputPlugin(homeDir) - const skills = [ - createSkillPrompt('project', 'inspect-locally'), - createSkillPrompt('global', 'ship-it') - ] - const ctx = createContext(workspaceBase, { - outputScopes: { - plugins: { - GenericSkillsOutputPlugin: { - skills: 'global', - mcp: 'project' - } - } - } - }, skills) - - const declarations = await plugin.declareOutputFiles(ctx) - - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(homeDir, '.agents', 'skills', 'ship-it', 'SKILL.md') - ) - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(workspaceBase, '.agents', 'skills', 'inspect-locally', 'mcp.json') - ) - }) -}) - -describe('genericSkillsOutputPlugin cleanup', () => { - it('declares cleanup for the full legacy global ~/.skills directory', async () => { - const homeDir = path.resolve('tmp/generic-skills-home') - const plugin = new TestGenericSkillsOutputPlugin(homeDir) - - const cleanup = await plugin.declareCleanupPaths(createCleanContext()) - const deletePaths = cleanup.delete?.map(target => target.path.replaceAll('\\', '/')) ?? [] - - expect(deletePaths).toContain( - path.join(homeDir, '.agents', 'skills').replaceAll('\\', '/') - ) - expect(deletePaths).toContain( - path.join(homeDir, '.skills').replaceAll('\\', '/') - ) - }) -}) diff --git a/cli/src/plugins/GenericSkillsOutputPlugin.ts b/cli/src/plugins/GenericSkillsOutputPlugin.ts deleted file mode 100644 index c7698ab3..00000000 --- a/cli/src/plugins/GenericSkillsOutputPlugin.ts +++ /dev/null @@ -1,245 +0,0 @@ -import type { - OutputFileDeclaration, - OutputWriteContext, - SkillPrompt -} from './plugin-core' - -import {Buffer} from 'node:buffer' -import {AbstractOutputPlugin, filterByProjectConfig} from './plugin-core' - -const PROJECT_SKILLS_DIR = '.agents/skills' -const LEGACY_SKILLS_DIR = '.skills' -const SKILL_FILE_NAME = 'SKILL.md' -const MCP_CONFIG_FILE = 'mcp.json' - -type GenericSkillOutputSource - = | {readonly kind: 'skillMain', readonly skill: SkillPrompt} - | {readonly kind: 'skillMcp', readonly rawContent: string} - | {readonly kind: 'skillChildDoc', readonly content: string} - | { - readonly kind: 'skillResource' - readonly content: string - readonly encoding: 'text' | 'base64' - } - -/** - * Output plugin that writes skills directly to each project's .agents/skills/ directory. - * - * Structure: - * - Project: /.agents/skills//SKILL.md, mcp.json, child docs, resources - * - * @deprecated Legacy compact skills output. Cleanup must remove the entire - * global `~/.skills/` directory in addition to the current skill targets. - */ -export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('GenericSkillsOutputPlugin', { - outputFileName: SKILL_FILE_NAME, - treatWorkspaceRootProjectAsProject: true, - skills: {}, - cleanup: { - delete: { - project: { - dirs: [PROJECT_SKILLS_DIR, LEGACY_SKILLS_DIR] - }, - global: { - dirs: [PROJECT_SKILLS_DIR, LEGACY_SKILLS_DIR] - } - } - }, - capabilities: { - skills: { - scopes: ['project', 'global'], - singleScope: true - }, - mcp: { - scopes: ['project', 'global'], - singleScope: true - } - } - }) - } - - override async declareOutputFiles( - ctx: OutputWriteContext - ): Promise { - const declarations: OutputFileDeclaration[] = [] - const {skills} = ctx.collectedOutputContext - - if (skills == null || skills.length === 0) return declarations - - const selectedSkills = this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'skills') - ) - const selectedMcpSkills = this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'mcp') - ?? this.getTopicScopeOverride(ctx, 'skills') - ) - - const pushSkillDeclarations = ( - baseSkillsDir: string, - scope: 'project' | 'global', - filteredSkills: readonly SkillPrompt[] - ): void => { - for (const skill of filteredSkills) { - const skillName = this.getSkillName(skill) - const skillDir = this.joinPath(baseSkillsDir, skillName) - - declarations.push({ - path: this.joinPath(skillDir, SKILL_FILE_NAME), - scope, - source: { - kind: 'skillMain', - skill - } satisfies GenericSkillOutputSource - }) - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - declarations.push({ - path: this.joinPath( - skillDir, - childDoc.relativePath.replace(/\.mdx$/, '.md') - ), - scope, - source: { - kind: 'skillChildDoc', - content: childDoc.content as string - } satisfies GenericSkillOutputSource - }) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - declarations.push({ - path: this.joinPath(skillDir, resource.relativePath), - scope, - source: { - kind: 'skillResource', - content: resource.content, - encoding: resource.encoding - } satisfies GenericSkillOutputSource - }) - } - } - } - } - - const pushMcpDeclarations = ( - baseSkillsDir: string, - scope: 'project' | 'global', - filteredMcpSkills: readonly SkillPrompt[] - ): void => { - for (const skill of filteredMcpSkills) { - if (skill.mcpConfig == null) continue - - declarations.push({ - path: this.joinPath( - baseSkillsDir, - this.getSkillName(skill), - MCP_CONFIG_FILE - ), - scope, - source: { - kind: 'skillMcp', - rawContent: skill.mcpConfig.rawContent - } satisfies GenericSkillOutputSource - }) - } - } - - if ( - selectedSkills.selectedScope === 'project' - || selectedMcpSkills.selectedScope === 'project' - ) { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) continue - - const filteredSkills = filterByProjectConfig( - selectedSkills.items, - project.projectConfig, - 'skills' - ) - const filteredMcpSkills = filterByProjectConfig( - selectedMcpSkills.items, - project.projectConfig, - 'skills' - ) - const baseSkillsDir = this.joinPath(projectRootDir, PROJECT_SKILLS_DIR) - - if ( - selectedSkills.selectedScope === 'project' - && filteredSkills.length > 0 - ) - { pushSkillDeclarations(baseSkillsDir, 'project', filteredSkills) } - - if (selectedMcpSkills.selectedScope === 'project') - { pushMcpDeclarations(baseSkillsDir, 'project', filteredMcpSkills) } - } - } - - if ( - selectedSkills.selectedScope !== 'global' - && selectedMcpSkills.selectedScope !== 'global' - ) - { return declarations } - - const baseSkillsDir = this.joinPath(this.getHomeDir(), PROJECT_SKILLS_DIR) - const promptSourceProjectConfig - = this.resolvePromptSourceProjectConfig(ctx) - if (selectedSkills.selectedScope === 'global') { - const filteredSkills = filterByProjectConfig( - selectedSkills.items, - promptSourceProjectConfig, - 'skills' - ) - if (filteredSkills.length > 0) - { pushSkillDeclarations(baseSkillsDir, 'global', filteredSkills) } - } - - if (selectedMcpSkills.selectedScope !== 'global') return declarations - - const filteredMcpSkills = filterByProjectConfig( - selectedMcpSkills.items, - promptSourceProjectConfig, - 'skills' - ) - pushMcpDeclarations(baseSkillsDir, 'global', filteredMcpSkills) - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - const source = declaration.source as GenericSkillOutputSource - switch (source.kind) { - case 'skillMain': { - const frontMatterData = this.buildSkillFrontMatter(source.skill) - return this.buildMarkdownContent( - source.skill.content as string, - frontMatterData, - ctx - ) - } - case 'skillMcp': - return source.rawContent - case 'skillChildDoc': - return source.content - case 'skillResource': - return source.encoding === 'base64' - ? Buffer.from(source.content, 'base64') - : source.content - default: - throw new Error(`Unsupported declaration source for ${this.name}`) - } - } -} diff --git a/cli/src/plugins/GitExcludeOutputPlugin.ts b/cli/src/plugins/GitExcludeOutputPlugin.ts deleted file mode 100644 index 8f20b92d..00000000 --- a/cli/src/plugins/GitExcludeOutputPlugin.ts +++ /dev/null @@ -1,90 +0,0 @@ -import type { - OutputFileDeclaration, - OutputWriteContext -} from './plugin-core' -import * as path from 'node:path' -import {AbstractOutputPlugin, findAllGitRepos, resolveGitInfoDir} from './plugin-core' - -export class GitExcludeOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('GitExcludeOutputPlugin', {capabilities: {}}) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {workspace, globalGitIgnore, shadowGitExclude} = ctx.collectedOutputContext - const managedContent = this.buildManagedContent(globalGitIgnore, shadowGitExclude) - if (managedContent.length === 0) return declarations - - const finalContent = this.normalizeContent(managedContent) - const writtenPaths = new Set() - const {projects} = workspace - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - - const projectDir = project.dirFromWorkspacePath.getAbsolutePath() - const gitRepoDirs = [projectDir, ...findAllGitRepos(projectDir)] - - for (const repoDir of gitRepoDirs) { - const gitInfoDir = resolveGitInfoDir(repoDir) - if (gitInfoDir == null) continue - - const excludePath = path.join(gitInfoDir, 'exclude') - if (writtenPaths.has(excludePath)) continue - writtenPaths.add(excludePath) - - declarations.push({ - path: excludePath, - scope: 'project', - source: {content: finalContent} - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - void ctx - const source = declaration.source as {content?: string} - if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) - return source.content - } - - private buildManagedContent(globalGitIgnore?: string, shadowGitExclude?: string): string { - const parts: string[] = [] - - if (globalGitIgnore != null && globalGitIgnore.trim().length > 0) { // Handle globalGitIgnore first - const sanitized = this.sanitizeContent(globalGitIgnore) - if (sanitized.length > 0) parts.push(sanitized) - } - - if (shadowGitExclude != null && shadowGitExclude.trim().length > 0) { // Handle shadowGitExclude - const sanitized = this.sanitizeContent(shadowGitExclude) - if (sanitized.length > 0) parts.push(sanitized) - } - - if (parts.length === 0) return '' // Return early if no content was added - return parts.join('\n') - } - - private sanitizeContent(content: string): string { - const lines = content.split(/\r?\n/) - const filtered = lines.filter(line => { - const trimmed = line.trim() - if (trimmed.length === 0) return true - return !(trimmed.startsWith('#') && !trimmed.startsWith('\\#')) - }) - return filtered.join('\n').trim() - } - - private normalizeContent(content: string): string { - const trimmed = content.trim() - if (trimmed.length === 0) return '' - return `${trimmed}\n` - } -} diff --git a/cli/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts b/cli/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts deleted file mode 100644 index 91c0b9c5..00000000 --- a/cli/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts +++ /dev/null @@ -1,366 +0,0 @@ -import type { - CommandPrompt, - OutputCleanContext, - OutputCleanupDeclarations, - OutputFileDeclaration, - OutputPluginContext, - OutputWriteContext, - ProjectChildrenMemoryPrompt, - SkillPrompt -} from './plugin-core' -import {Buffer} from 'node:buffer' -import * as path from 'node:path' -import {AbstractOutputPlugin, filterByProjectConfig, PLUGIN_NAMES} from './plugin-core' - -const PROJECT_MEMORY_FILE = 'AGENTS.md' -const PROMPTS_SUBDIR = 'prompts' -const SKILLS_SUBDIR = 'skills' -const SKILL_FILE_NAME = 'SKILL.md' -const AIASSISTANT_DIR = '.aiassistant' -const CODEX_DIR = 'codex' -const RULES_SUBDIR = 'rules' -const ROOT_RULE_FILE = 'always.md' -const CHILD_RULE_FILE_PREFIX = 'glob-' -const RULE_APPLY_ALWAYS = '始终' -const RULE_APPLY_GLOB = '按文件模式' -const RULE_GLOB_KEY = '模式' -type JetBrainsCodexOutputSource - = | {readonly kind: 'projectRuleContent', readonly content: string} - | {readonly kind: 'globalMemory', readonly content: string} - | {readonly kind: 'command', readonly command: CommandPrompt} - | {readonly kind: 'skill', readonly skill: SkillPrompt} - | {readonly kind: 'skillReference', readonly content: string} - | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} - | {readonly kind: 'ignoreFile', readonly content: string} - -export class JetBrainsAIAssistantCodexOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('JetBrainsAIAssistantCodexOutputPlugin', { - outputFileName: PROJECT_MEMORY_FILE, - treatWorkspaceRootProjectAsProject: true, - commands: { - subDir: PROMPTS_SUBDIR, - transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} - }, - skills: { - subDir: SKILLS_SUBDIR - }, - dependsOn: [PLUGIN_NAMES.AgentsOutput], - indexignore: '.aiignore', - cleanup: { - delete: { - project: { - dirs: ['.aiassistant/rules', '.aiassistant/codex/prompts', '.aiassistant/codex/skills'] - } - } - }, - capabilities: { - prompt: { - scopes: ['project', 'global'], - singleScope: false - }, - commands: { - scopes: ['project', 'global'], - singleScope: true - }, - skills: { - scopes: ['project', 'global'], - singleScope: true - } - } - }) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {globalMemory, commands, skills, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext - const concreteProjects = this.getConcreteProjects(ctx) - const promptProjects = this.getProjectPromptOutputProjects(ctx) - const codexDirs = this.getJetBrainsCodexDirs(ctx) - const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) - const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) - const selectedCommands = commands != null - ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, command => this.resolveCommandSourceScope(command), this.getTopicScopeOverride(ctx, 'commands')) - : {items: [] as readonly CommandPrompt[]} - const selectedSkills = skills != null - ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) - : {items: [] as readonly SkillPrompt[]} - const transformOptions = this.getTransformOptionsFromContext(ctx) - - if (activePromptScopes.has('project')) { - for (const project of promptProjects) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) continue - const rulesDir = path.join(projectRootDir, AIASSISTANT_DIR, RULES_SUBDIR) - - if (project.rootMemoryPrompt != null) { - declarations.push({ - path: path.join(rulesDir, ROOT_RULE_FILE), - scope: 'project', - source: { - kind: 'projectRuleContent', - content: this.buildAlwaysRuleContent(project.rootMemoryPrompt.content as string, ctx) - } satisfies JetBrainsCodexOutputSource - }) - } - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - declarations.push({ - path: path.join(rulesDir, this.buildChildRuleFileName(child)), - scope: 'project', - source: { - kind: 'projectRuleContent', - content: this.buildGlobRuleContent(child, ctx) - } satisfies JetBrainsCodexOutputSource - }) - } - } - } - } - - const pushSkillDeclarations = ( - basePath: string, - scope: 'project' | 'global', - filteredSkills: readonly SkillPrompt[] - ): void => { - for (const skill of filteredSkills) { - const skillName = this.getSkillName(skill) - const skillDir = path.join(basePath, SKILLS_SUBDIR, skillName) - declarations.push({ - path: path.join(skillDir, SKILL_FILE_NAME), - scope, - source: {kind: 'skill', skill} satisfies JetBrainsCodexOutputSource - }) - - if (skill.childDocs != null) { - for (const refDoc of skill.childDocs) { - declarations.push({ - path: path.join(skillDir, refDoc.dir.path.replace(/\.mdx$/, '.md')), - scope, - source: { - kind: 'skillReference', - content: refDoc.content as string - } satisfies JetBrainsCodexOutputSource - }) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - declarations.push({ - path: path.join(skillDir, resource.relativePath), - scope, - source: { - kind: 'skillResource', - content: resource.content, - encoding: resource.encoding - } satisfies JetBrainsCodexOutputSource - }) - } - } - } - } - - if (selectedCommands.selectedScope === 'project' || selectedSkills.selectedScope === 'project') { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) continue - - const projectCodexDir = path.join(projectRootDir, AIASSISTANT_DIR, CODEX_DIR) - if (selectedCommands.selectedScope === 'project') { - const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') - for (const command of filteredCommands) { - declarations.push({ - path: path.join(projectCodexDir, PROMPTS_SUBDIR, this.transformCommandName(command, transformOptions)), - scope: 'project', - source: {kind: 'command', command} satisfies JetBrainsCodexOutputSource - }) - } - } - - if (selectedSkills.selectedScope === 'project') { - const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') - pushSkillDeclarations(projectCodexDir, 'project', filteredSkills) - } - } - } - - if (codexDirs.length > 0) { - if (globalMemory != null && activePromptScopes.has('global')) { - for (const codexDir of codexDirs) { - declarations.push({ - path: path.join(codexDir, PROJECT_MEMORY_FILE), - scope: 'global', - source: { - kind: 'globalMemory', - content: globalMemory.content as string - } satisfies JetBrainsCodexOutputSource - }) - } - } - - const filteredCommands = selectedCommands.selectedScope === 'global' - ? filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') - : [] - const filteredSkills = selectedSkills.selectedScope === 'global' - ? filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') - : [] - for (const codexDir of codexDirs) { - for (const command of filteredCommands) { - declarations.push({ - path: path.join(codexDir, PROMPTS_SUBDIR, this.transformCommandName(command, transformOptions)), - scope: 'global', - source: {kind: 'command', command} satisfies JetBrainsCodexOutputSource - }) - } - - pushSkillDeclarations(codexDir, 'global', filteredSkills) - } - } - - const ignoreOutputPath = this.getIgnoreOutputPath() - const ignoreFile = this.indexignore == null - ? void 0 - : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) - if (ignoreOutputPath != null && ignoreFile != null) { - for (const project of concreteProjects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null || project.isPromptSourceProject === true) continue - declarations.push({ - path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), - scope: 'project', - source: { - kind: 'ignoreFile', - content: ignoreFile.content - } satisfies JetBrainsCodexOutputSource - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - const source = declaration.source as JetBrainsCodexOutputSource - switch (source.kind) { - case 'projectRuleContent': - case 'globalMemory': - case 'skillReference': - case 'ignoreFile': return source.content - case 'command': return this.buildCommandContent(source.command, ctx) - case 'skill': return this.buildCodexSkillContent(source.skill, ctx) - case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content - default: throw new Error(`Unsupported declaration source for ${this.name}`) - } - } - - override async declareCleanupPaths(ctx: OutputCleanContext): Promise { - const baseDeclarations = await super.declareCleanupPaths(ctx) - const codexDirs = this.getJetBrainsCodexDirs(ctx) - if (codexDirs.length === 0) return baseDeclarations - - const dynamicGlobalDeletes = codexDirs.flatMap(codexDir => ([ - {path: path.join(codexDir, PROJECT_MEMORY_FILE), kind: 'file', scope: 'global'}, - {path: path.join(codexDir, PROMPTS_SUBDIR), kind: 'directory', scope: 'global'}, - {path: path.join(codexDir, SKILLS_SUBDIR), kind: 'directory', scope: 'global'} - ] as const)) - const baseDeletes = baseDeclarations.delete ?? [] - - return { - ...baseDeclarations, - delete: [ - ...baseDeletes, - ...dynamicGlobalDeletes - ] - } - } - - private getJetBrainsCodexDirs(ctx: OutputPluginContext | OutputWriteContext | OutputCleanContext): readonly string[] { - return ctx.runtimeTargets.jetbrainsCodexDirs - } - - private buildChildRuleFileName(child: ProjectChildrenMemoryPrompt): string { - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const normalizedPath = childPath - .replaceAll('\\', '/') - .replaceAll(/^\/+|\/+$/g, '') - .replaceAll('/', '-') - - const suffix = normalizedPath.length > 0 ? normalizedPath : 'root' - return `${CHILD_RULE_FILE_PREFIX}${suffix}.md` - } - - private buildChildRulePattern(child: ProjectChildrenMemoryPrompt): string { - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const normalizedPath = childPath - .replaceAll('\\', '/') - .replaceAll(/^\/+|\/+$/g, '') - - if (normalizedPath.length === 0) return '**/*' - return `${normalizedPath}/**` - } - - private buildAlwaysRuleContent(content: string, ctx: OutputWriteContext): string { - const fmData: Record = { - apply: RULE_APPLY_ALWAYS - } - - return this.buildMarkdownContent(content, fmData, ctx) - } - - private buildGlobRuleContent(child: ProjectChildrenMemoryPrompt, ctx: OutputWriteContext): string { - const pattern = this.buildChildRulePattern(child) - const fmData: Record = { - apply: RULE_APPLY_GLOB, - [RULE_GLOB_KEY]: pattern - } - - return this.buildMarkdownContent(child.content as string, fmData, ctx) - } - - private buildCodexSkillContent(skill: SkillPrompt, ctx: OutputWriteContext): string { - const fm = skill.yamlFrontMatter - - const name = this.normalizeSkillName(this.getSkillName(skill), 64) - const description = this.normalizeToSingleLine(fm.description, 1024) - - const metadata: Record = {} - - if (fm.displayName != null) metadata['short-description'] = fm.displayName - if (fm.version != null) metadata['version'] = fm.version - if (fm.author != null) metadata['author'] = fm.author - if (fm.keywords != null && fm.keywords.length > 0) metadata['keywords'] = [...fm.keywords] - - const fmData: Record = { - name, - description - } - - if (Object.keys(metadata).length > 0) fmData['metadata'] = metadata - if (fm.allowTools != null && fm.allowTools.length > 0) fmData['allowed-tools'] = fm.allowTools.join(' ') - - return this.buildMarkdownContent(skill.content as string, fmData, ctx) - } - - private normalizeSkillName(name: string, maxLength: number): string { - let normalized = name - .toLowerCase() - .replaceAll(/[^a-z0-9-]/g, '-') - .replaceAll(/-+/g, '-') - .replaceAll(/^-+|-+$/g, '') - - if (normalized.length > maxLength) normalized = normalized.slice(0, maxLength).replace(/-+$/, '') - - return normalized - } - - private normalizeToSingleLine(text: string, maxLength: number): string { - const singleLine = text.replaceAll(/[\r\n]+/g, ' ').replaceAll(/\s+/g, ' ').trim() - if (singleLine.length > maxLength) return `${singleLine.slice(0, maxLength - 3)}...` - return singleLine - } -} diff --git a/cli/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts b/cli/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts deleted file mode 100644 index aa49103c..00000000 --- a/cli/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts +++ /dev/null @@ -1,68 +0,0 @@ -import type { - OutputFileDeclaration, - OutputWriteContext -} from './plugin-core' -import {AbstractOutputPlugin, IDEKind} from './plugin-core' - -const IDEA_DIR = '.idea' -const CODE_STYLES_DIR = 'codeStyles' - -export class JetBrainsIDECodeStyleConfigOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('JetBrainsIDECodeStyleConfigOutputPlugin', { - cleanup: { - delete: { - project: { - files: ['.editorconfig', '.idea/codeStyles/Project.xml', '.idea/codeStyles/codeStyleConfig.xml', '.idea/.gitignore'] - } - } - }, - capabilities: {} - }) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {projects} = ctx.collectedOutputContext.workspace - const {jetbrainsConfigFiles, editorConfigFiles} = ctx.collectedOutputContext - const jetbrainsConfigs = [...jetbrainsConfigFiles ?? [], ...editorConfigFiles ?? []] - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - for (const config of jetbrainsConfigs) { - const targetRelativePath = this.getTargetRelativePath(config) - declarations.push({ - path: this.resolvePath(projectDir.basePath, projectDir.path, targetRelativePath), - scope: 'project', - source: {content: config.content} - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - void ctx - const source = declaration.source as {content?: string} - if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) - return source.content - } - - private getTargetRelativePath(config: {type: IDEKind, dir: {path: string}}): string { - const sourcePath = config.dir.path - - if (config.type === IDEKind.EditorConfig) return '.editorconfig' - - if (config.type !== IDEKind.IntellijIDEA) return this.basename(sourcePath) - - const ideaIndex = sourcePath.indexOf(IDEA_DIR) - if (ideaIndex !== -1) return sourcePath.slice(Math.max(0, ideaIndex)) - return this.joinPath(IDEA_DIR, CODE_STYLES_DIR, this.basename(sourcePath)) - } -} diff --git a/cli/src/plugins/OpencodeCLIOutputPlugin.test.ts b/cli/src/plugins/OpencodeCLIOutputPlugin.test.ts deleted file mode 100644 index ed51fbc8..00000000 --- a/cli/src/plugins/OpencodeCLIOutputPlugin.test.ts +++ /dev/null @@ -1,118 +0,0 @@ -import type {OutputCleanContext, OutputWriteContext, SubAgentPrompt} from './plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {OpencodeCLIOutputPlugin} from './OpencodeCLIOutputPlugin' -import {createLogger, FilePathKind, PromptKind} from './plugin-core' - -class TestOpencodeCLIOutputPlugin extends OpencodeCLIOutputPlugin { - constructor(private readonly testHomeDir: string) { - super() - } - - protected override getHomeDir(): string { - return this.testHomeDir - } -} - -function createCleanContext(): OutputCleanContext { - return { - logger: createLogger('OpencodeCLIOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - runtimeTargets: {}, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Relative, - path: '.', - basePath: '.', - getDirectoryName: () => '.', - getAbsolutePath: () => path.resolve('.') - }, - projects: [] - } - } - } as unknown as OutputCleanContext -} - -function createSubAgentPrompt(scope: 'project' | 'global'): SubAgentPrompt { - return { - type: PromptKind.SubAgent, - content: 'subagent body', - length: 13, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'subagents/reviewer.mdx', - basePath: path.resolve('tmp/dist/subagents'), - getDirectoryName: () => 'reviewer', - getAbsolutePath: () => path.resolve('tmp/dist/subagents/reviewer.mdx') - }, - agentPrefix: 'ops', - agentName: 'reviewer', - canonicalName: 'ops-reviewer', - yamlFrontMatter: { - description: 'Reviewer', - scope, - namingCase: 'kebab-case' - }, - markdownContents: [] - } as unknown as SubAgentPrompt -} - -describe('opencodeCLIOutputPlugin synthetic workspace project output', () => { - it('writes project-scoped subagents into workspace root .opencode/agents via the synthetic workspace project', async () => { - const workspaceBase = path.resolve('tmp/opencode-workspace') - const plugin = new OpencodeCLIOutputPlugin() - const ctx = { - logger: createLogger('OpencodeCLIOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - runtimeTargets: {}, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [ - { - name: '__workspace__', - isWorkspaceRootProject: true - } - ] - }, - subAgents: [createSubAgentPrompt('project')] - } - } as unknown as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - - expect(declarations.map(declaration => declaration.path)).toContain(path.join(workspaceBase, '.opencode', 'agents', 'ops-reviewer.md')) - expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) - }) -}) - -describe('opencodeCLIOutputPlugin cleanup', () => { - it('keeps global opencode.json out of cleanup delete targets', async () => { - const tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-opencode-cleanup-')) - - try { - const plugin = new TestOpencodeCLIOutputPlugin(tempHomeDir) - const cleanup = await plugin.declareCleanupPaths(createCleanContext()) - const deletePaths = cleanup.delete?.map(target => target.path.replaceAll('\\', '/')) ?? [] - - expect(deletePaths).toContain(path.join(tempHomeDir, '.config', 'opencode', 'AGENTS.md').replaceAll('\\', '/')) - expect(deletePaths).not.toContain(path.join(tempHomeDir, '.config', 'opencode', 'opencode.json').replaceAll('\\', '/')) - } finally { - fs.rmSync(tempHomeDir, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/plugins/OpencodeCLIOutputPlugin.ts b/cli/src/plugins/OpencodeCLIOutputPlugin.ts deleted file mode 100644 index 72f6564a..00000000 --- a/cli/src/plugins/OpencodeCLIOutputPlugin.ts +++ /dev/null @@ -1,499 +0,0 @@ -import type {CommandPrompt, OutputFileDeclaration, OutputWriteContext, RulePrompt, SkillPrompt, SubAgentPrompt} from './plugin-core' -import {Buffer} from 'node:buffer' -import * as path from 'node:path' -import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterByProjectConfig, PLUGIN_NAMES} from './plugin-core' - -const GLOBAL_MEMORY_FILE = 'AGENTS.md' -const GLOBAL_CONFIG_DIR = '.config/opencode' -const OPENCODE_CONFIG_FILE = 'opencode.json' -const OPENCODE_RULES_PLUGIN_NAME = 'opencode-rules@latest' -const PROJECT_RULES_DIR = '.opencode' -const COMMANDS_SUBDIR = 'commands' -const AGENTS_SUBDIR = 'agents' -const SKILLS_SUBDIR = 'skills' -const RULES_SUBDIR = 'rules' - -type OpencodeOutputSource - = | {readonly kind: 'globalMemory', readonly content: string} - | {readonly kind: 'projectRootMemory', readonly content: string} - | {readonly kind: 'projectChildMemory', readonly content: string} - | {readonly kind: 'command', readonly command: CommandPrompt} - | {readonly kind: 'subAgent', readonly agent: SubAgentPrompt} - | {readonly kind: 'skillMain', readonly skill: SkillPrompt, readonly normalizedSkillName: string} - | {readonly kind: 'skillReference', readonly content: string} - | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} - | {readonly kind: 'mcpConfig', readonly mcpServers: Record>} - | {readonly kind: 'rule', readonly rule: RulePrompt} - -function transformOpencodeCommandFrontMatter( - _cmd: CommandPrompt, - context: { - readonly sourceFrontMatter?: Record - } -): Record { - const frontMatter: Record = {} - const source = context.sourceFrontMatter - - if (source?.['description'] != null) frontMatter['description'] = source['description'] - if (source?.['agent'] != null) frontMatter['agent'] = source['agent'] - if (source?.['model'] != null) frontMatter['model'] = source['model'] - - if (source?.['allowTools'] != null && Array.isArray(source['allowTools'])) { - const tools: Record = {} - for (const tool of source['allowTools']) tools[String(tool)] = true - frontMatter['tools'] = tools - } - - for (const [key, value] of Object.entries(source ?? {})) { - if (!['description', 'agent', 'model', 'allowTools', 'namingCase', 'argumentHint'].includes(key)) frontMatter[key] = value - } - - return frontMatter -} - -export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('OpencodeCLIOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: GLOBAL_MEMORY_FILE, - treatWorkspaceRootProjectAsProject: true, - commands: { - subDir: COMMANDS_SUBDIR, - transformFrontMatter: transformOpencodeCommandFrontMatter - }, - subagents: { - subDir: AGENTS_SUBDIR - }, - skills: { - subDir: SKILLS_SUBDIR - }, - rules: { - subDir: RULES_SUBDIR, - prefix: 'rule', - sourceScopes: ['project', 'global'] - }, - cleanup: { - delete: { - project: { - files: [GLOBAL_MEMORY_FILE, '.opencode/opencode.json'], - dirs: ['.opencode/commands', '.opencode/agents', '.opencode/skills', '.opencode/rules'] - }, - global: { - files: ['.config/opencode/AGENTS.md'], - dirs: ['.config/opencode/commands', '.config/opencode/agents', '.config/opencode/skills', '.config/opencode/rules'] - }, - xdgConfig: { - files: ['opencode/AGENTS.md'], - dirs: ['opencode/commands', 'opencode/agents', 'opencode/skills', 'opencode/rules'] - } - } - }, - dependsOn: [PLUGIN_NAMES.AgentsOutput], - capabilities: { - prompt: { - scopes: ['project', 'global'], - singleScope: false - }, - rules: { - scopes: ['project', 'global'], - singleScope: false - }, - commands: { - scopes: ['project', 'global'], - singleScope: true - }, - subagents: { - scopes: ['project', 'global'], - singleScope: true - }, - skills: { - scopes: ['project', 'global'], - singleScope: true - }, - mcp: { - scopes: ['project', 'global'], - singleScope: true - } - } - }) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {globalMemory, commands, subAgents, skills, rules} = ctx.collectedOutputContext - const globalDir = this.getGlobalConfigDir() - const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) - const promptProjects = this.getProjectPromptOutputProjects(ctx) - const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) - const selectedCommands - = commands != null - ? this.selectSingleScopeItems( - commands, - this.commandsConfig.sourceScopes, - command => this.resolveCommandSourceScope(command), - this.getTopicScopeOverride(ctx, 'commands') - ) - : {items: [] as readonly CommandPrompt[]} - const selectedSubAgents - = subAgents != null - ? this.selectSingleScopeItems( - subAgents, - this.subAgentsConfig.sourceScopes, - subAgent => this.resolveSubAgentSourceScope(subAgent), - this.getTopicScopeOverride(ctx, 'subagents') - ) - : {items: [] as readonly SubAgentPrompt[]} - const selectedSkills - = skills != null - ? this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'skills') - ) - : {items: [] as readonly SkillPrompt[]} - const selectedMcpSkills - = skills != null - ? this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'mcp') ?? this.getTopicScopeOverride(ctx, 'skills') - ) - : {items: [] as readonly SkillPrompt[]} - - if (globalMemory != null && activePromptScopes.has('global')) { - declarations.push({ - path: path.join(globalDir, GLOBAL_MEMORY_FILE), - scope: 'global', - source: { - kind: 'globalMemory', - content: globalMemory.content as string - } satisfies OpencodeOutputSource - }) - } - - const pushSkillDeclarations = (basePath: string, scope: 'project' | 'global', filteredSkills: readonly SkillPrompt[]): void => { - for (const skill of filteredSkills) { - const normalizedSkillName = this.validateAndNormalizeSkillName(this.getSkillName(skill)) - const skillDir = path.join(basePath, SKILLS_SUBDIR, normalizedSkillName) - - declarations.push({ - path: path.join(skillDir, 'SKILL.md'), - scope, - source: { - kind: 'skillMain', - skill, - normalizedSkillName - } satisfies OpencodeOutputSource - }) - - if (skill.childDocs != null) { - for (const refDoc of skill.childDocs) { - declarations.push({ - path: path.join(skillDir, refDoc.dir.path.replace(/\.mdx$/, '.md')), - scope, - source: { - kind: 'skillReference', - content: refDoc.content as string - } satisfies OpencodeOutputSource - }) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - declarations.push({ - path: path.join(skillDir, resource.relativePath), - scope, - source: { - kind: 'skillResource', - content: resource.content, - encoding: resource.encoding - } satisfies OpencodeOutputSource - }) - } - } - } - } - - const pushMcpDeclaration = (basePath: string, scope: 'project' | 'global', _filteredSkills: readonly SkillPrompt[]): void => { - void _filteredSkills - declarations.push({ - path: path.join(basePath, OPENCODE_CONFIG_FILE), - scope, - source: { - kind: 'mcpConfig', - mcpServers: {} - } satisfies OpencodeOutputSource - }) - } - - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - for (const project of promptProjects) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) continue - - if (project.rootMemoryPrompt != null && activePromptScopes.has('project')) { - declarations.push({ - path: path.join(projectRootDir, GLOBAL_MEMORY_FILE), - scope: 'project', - source: { - kind: 'projectRootMemory', - content: project.rootMemoryPrompt.content as string - } satisfies OpencodeOutputSource - }) - } - - if (project.childMemoryPrompts != null && activePromptScopes.has('project')) { - for (const child of project.childMemoryPrompts) { - declarations.push({ - path: this.resolveFullPath(child.dir), - scope: 'project', - source: { - kind: 'projectChildMemory', - content: child.content as string - } satisfies OpencodeOutputSource - }) - } - } - } - - if ( - selectedCommands.selectedScope === 'project' - || selectedSubAgents.selectedScope === 'project' - || selectedSkills.selectedScope === 'project' - || selectedMcpSkills.selectedScope === 'project' - ) { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) continue - const basePath = path.join(projectRootDir, PROJECT_RULES_DIR) - - const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') - if (selectedCommands.selectedScope === 'project') { - for (const command of filteredCommands) { - declarations.push({ - path: path.join(basePath, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), - scope: 'project', - source: {kind: 'command', command} satisfies OpencodeOutputSource - }) - } - } - - const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, project.projectConfig, 'subAgents') - if (selectedSubAgents.selectedScope === 'project') { - for (const agent of filteredSubAgents) { - declarations.push({ - path: path.join(basePath, AGENTS_SUBDIR, this.transformSubAgentName(agent)), - scope: 'project', - source: {kind: 'subAgent', agent} satisfies OpencodeOutputSource - }) - } - } - - const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') - if (selectedSkills.selectedScope === 'project') pushSkillDeclarations(basePath, 'project', filteredSkills) - - if (selectedMcpSkills.selectedScope === 'project') { - const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, project.projectConfig, 'skills') - pushMcpDeclaration(basePath, 'project', filteredMcpSkills) - } - } - } - - if (selectedCommands.selectedScope === 'global') { - const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') - for (const command of filteredCommands) { - declarations.push({ - path: path.join(globalDir, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), - scope: 'global', - source: {kind: 'command', command} satisfies OpencodeOutputSource - }) - } - } - - if (selectedSubAgents.selectedScope === 'global') { - const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, promptSourceProjectConfig, 'subAgents') - for (const agent of filteredSubAgents) { - declarations.push({ - path: path.join(globalDir, AGENTS_SUBDIR, this.transformSubAgentName(agent)), - scope: 'global', - source: {kind: 'subAgent', agent} satisfies OpencodeOutputSource - }) - } - } - - if (selectedSkills.selectedScope === 'global') { - const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') - pushSkillDeclarations(globalDir, 'global', filteredSkills) - } - - if (selectedMcpSkills.selectedScope === 'global') { - const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, promptSourceProjectConfig, 'skills') - pushMcpDeclaration(globalDir, 'global', filteredMcpSkills) - } - - // Keep opencode.json managed so the generated config can preserve user fields - // while normalizing the MCP section to an empty object. - - if (rules == null || rules.length === 0) return declarations - - const activeRuleScopes = this.selectRuleScopes(ctx, rules) - for (const ruleScope of activeRuleScopes) { - if (ruleScope === 'global') { - const globalRules = rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global') - for (const rule of globalRules) { - declarations.push({ - path: path.join(globalDir, RULES_SUBDIR, this.buildRuleFileName(rule)), - scope: 'global', - source: {kind: 'rule', rule} satisfies OpencodeOutputSource - }) - } - } else if (ruleScope === 'project') { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) continue - const basePath = path.join(projectRootDir, PROJECT_RULES_DIR) - - const projectRules = applySubSeriesGlobPrefix( - filterByProjectConfig( - rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'project'), - project.projectConfig, - 'rules' - ), - project.projectConfig - ) - for (const rule of projectRules) { - declarations.push({ - path: path.join(basePath, RULES_SUBDIR, this.buildRuleFileName(rule)), - scope: 'project', - source: {kind: 'rule', rule} satisfies OpencodeOutputSource - }) - } - } - } - } - return declarations - } - - override async convertContent(declaration: OutputFileDeclaration, ctx: OutputWriteContext): Promise { - const source = declaration.source as OpencodeOutputSource - switch (source.kind) { - case 'globalMemory': - case 'projectRootMemory': - case 'projectChildMemory': - case 'skillReference': - return source.content - case 'command': - return this.buildCommandContent(source.command, ctx) - case 'subAgent': { - const frontMatter = this.buildOpencodeAgentFrontMatter(source.agent) - return this.buildMarkdownContent(source.agent.content, frontMatter, ctx) - } - case 'skillMain': { - const frontMatter = this.buildOpencodeSkillFrontMatter(source.skill, source.normalizedSkillName) - return this.buildMarkdownContent(source.skill.content as string, frontMatter, ctx) - } - case 'skillResource': - return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content - case 'mcpConfig': - return JSON.stringify( - { - $schema: 'https://opencode.ai/config.json', - plugin: [OPENCODE_RULES_PLUGIN_NAME], - mcp: {} - }, - null, - 2 - ) - case 'rule': - return this.buildRuleContent(source.rule, ctx) - default: - throw new Error(`Unsupported declaration source for ${this.name}`) - } - } - - private buildOpencodeAgentFrontMatter(agent: SubAgentPrompt): Record { - const frontMatter: Record = {} - const source = agent.yamlFrontMatter as Record | undefined - - if (source?.['description'] != null) frontMatter['description'] = source['description'] - - frontMatter['mode'] = source?.['mode'] ?? 'subagent' - - if (source?.['model'] != null) frontMatter['model'] = source['model'] - if (source?.['temperature'] != null) frontMatter['temperature'] = source['temperature'] - if (source?.['maxSteps'] != null) frontMatter['maxSteps'] = source['maxSteps'] - if (source?.['hidden'] != null) frontMatter['hidden'] = source['hidden'] - - if (source?.['allowTools'] != null && Array.isArray(source['allowTools'])) { - const tools: Record = {} - for (const tool of source['allowTools']) tools[String(tool)] = true - frontMatter['tools'] = tools - } - - if (source?.['permission'] != null && typeof source['permission'] === 'object') frontMatter['permission'] = source['permission'] - - for (const [key, value] of Object.entries(source ?? {})) { - if (!['description', 'mode', 'model', 'temperature', 'maxSteps', 'hidden', 'allowTools', 'permission', 'namingCase', 'name', 'color'].includes(key)) { - frontMatter[key] = value - } - } - - return frontMatter - } - - private buildOpencodeSkillFrontMatter(skill: SkillPrompt, skillName: string): Record { - const frontMatter: Record = {} - const source = skill.yamlFrontMatter as Record | undefined - - frontMatter['name'] = skillName - if (source?.['description'] != null) frontMatter['description'] = source['description'] - - frontMatter['license'] = source?.['license'] ?? 'MIT' - frontMatter['compatibility'] = source?.['compatibility'] ?? 'opencode' - - const metadata: Record = {} - const metadataFields = ['author', 'version', 'keywords', 'category', 'repository', 'displayName'] - - for (const field of metadataFields) { - if (source?.[field] != null) metadata[field] = source[field] - } - - const reservedFields = new Set([ - 'name', - 'description', - 'license', - 'compatibility', - 'namingCase', - 'allowTools', - 'keywords', - 'displayName', - 'author', - 'version' - ]) - for (const [key, value] of Object.entries(source ?? {})) { - if (!reservedFields.has(key)) metadata[key] = value - } - - if (Object.keys(metadata).length > 0) frontMatter['metadata'] = metadata - - return frontMatter - } - - private validateAndNormalizeSkillName(name: string): string { - let normalized = name.toLowerCase() - normalized = normalized.replaceAll(/[^a-z0-9-]+/g, '-') - normalized = normalized.replaceAll(/-+/g, '-') - normalized = normalized.replaceAll(/^-|-$/g, '') - - if (normalized.length === 0) normalized = 'skill' - else if (normalized.length > 64) { - normalized = normalized.slice(0, 64) - normalized = normalized.replace(/-$/, '') - } - - return normalized - } -} diff --git a/cli/src/plugins/PromptMarkdownCleanup.test.ts b/cli/src/plugins/PromptMarkdownCleanup.test.ts deleted file mode 100644 index 032e5797..00000000 --- a/cli/src/plugins/PromptMarkdownCleanup.test.ts +++ /dev/null @@ -1,259 +0,0 @@ -import type { - OutputCleanContext, - OutputPlugin, - ProjectChildrenMemoryPrompt, - ProjectRootMemoryPrompt -} from './plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {collectDeletionTargets} from '../commands/CleanupUtils' -import {AgentsOutputPlugin} from './AgentsOutputPlugin' -import {ClaudeCodeCLIOutputPlugin} from './ClaudeCodeCLIOutputPlugin' -import {GeminiCLIOutputPlugin} from './GeminiCLIOutputPlugin' -import {FilePathKind, PromptKind} from './plugin-core' - -interface CleanupTestCase { - readonly name: string - readonly fileName: string - readonly createPlugin: () => OutputPlugin -} - -const TEST_CASES: readonly CleanupTestCase[] = [ - { - name: 'AgentsOutputPlugin', - fileName: 'AGENTS.md', - createPlugin: () => new AgentsOutputPlugin() - }, - { - name: 'ClaudeCodeCLIOutputPlugin', - fileName: 'CLAUDE.md', - createPlugin: () => new ClaudeCodeCLIOutputPlugin() - }, - { - name: 'GeminiCLIOutputPlugin', - fileName: 'GEMINI.md', - createPlugin: () => new GeminiCLIOutputPlugin() - } -] - -function createRootPrompt(content: string): ProjectRootMemoryPrompt { - return { - type: PromptKind.ProjectRootMemory, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Root, - path: '', - getDirectoryName: () => '' - }, - markdownContents: [] - } as ProjectRootMemoryPrompt -} - -function createChildPrompt( - workspaceDir: string, - projectName: string, - relativePath: string, - content: string -): ProjectChildrenMemoryPrompt { - return { - type: PromptKind.ProjectChildrenMemory, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - markdownContents: [], - dir: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: path.join(workspaceDir, projectName), - getDirectoryName: () => path.basename(relativePath), - getAbsolutePath: () => path.join(workspaceDir, projectName, relativePath) - }, - workingChildDirectoryPath: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: path.join(workspaceDir, projectName), - getDirectoryName: () => path.basename(relativePath), - getAbsolutePath: () => path.join(workspaceDir, projectName, relativePath) - } - } as ProjectChildrenMemoryPrompt -} - -function createCleanContext(workspaceDir: string): OutputCleanContext { - return { - logger: { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - }, - fs, - path, - glob, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [ - { - name: '__workspace__', - isWorkspaceRootProject: true, - rootMemoryPrompt: createRootPrompt('workspace root') - }, - { - name: 'aindex', - isPromptSourceProject: true, - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'aindex', - basePath: workspaceDir, - getDirectoryName: () => 'aindex', - getAbsolutePath: () => path.join(workspaceDir, 'aindex') - }, - rootMemoryPrompt: createRootPrompt('prompt-source root'), - childMemoryPrompts: [ - createChildPrompt( - workspaceDir, - 'aindex', - 'commands', - 'prompt-source child' - ) - ] - }, - { - name: 'project-a', - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceDir, 'project-a') - }, - rootMemoryPrompt: createRootPrompt('project root'), - childMemoryPrompts: [ - createChildPrompt( - workspaceDir, - 'project-a', - 'commands', - 'project child' - ) - ] - } - ] - } - } - } as OutputCleanContext -} - -describe.each(TEST_CASES)('$name cleanup', ({fileName, createPlugin}) => { - it('cleans workspace and non-prompt project markdown outputs without touching prompt-source paths', async () => { - const tempDir = fs.mkdtempSync( - path.join(os.tmpdir(), `tnmsc-${fileName.toLowerCase()}-cleanup-`) - ) - const workspaceDir = path.join(tempDir, 'workspace') - const workspaceFile = path.join(workspaceDir, fileName) - const promptSourceRootFile = path.join(workspaceDir, 'aindex', fileName) - const promptSourceChildFile = path.join( - workspaceDir, - 'aindex', - 'commands', - fileName - ) - const projectRootFile = path.join(workspaceDir, 'project-a', fileName) - const projectChildFile = path.join( - workspaceDir, - 'project-a', - 'commands', - fileName - ) - const manualProjectChildFile = path.join( - workspaceDir, - 'project-a', - 'docs', - fileName - ) - - fs.mkdirSync(path.dirname(promptSourceChildFile), {recursive: true}) - fs.mkdirSync(path.dirname(manualProjectChildFile), {recursive: true}) - fs.mkdirSync(path.dirname(projectChildFile), {recursive: true}) - fs.writeFileSync(workspaceFile, '# workspace', 'utf8') - fs.writeFileSync(promptSourceRootFile, '# prompt-source root', 'utf8') - fs.writeFileSync(promptSourceChildFile, '# prompt-source child', 'utf8') - fs.writeFileSync(projectRootFile, '# project root', 'utf8') - fs.writeFileSync(projectChildFile, '# project child', 'utf8') - fs.writeFileSync(manualProjectChildFile, '# manual child', 'utf8') - - try { - const result = await collectDeletionTargets( - [createPlugin()], - createCleanContext(workspaceDir) - ) - const normalizedFilesToDelete = result.filesToDelete.map(target => - target.replaceAll('\\', '/')) - - expect(normalizedFilesToDelete).toEqual( - expect.arrayContaining([ - workspaceFile.replaceAll('\\', '/'), - projectRootFile.replaceAll('\\', '/'), - projectChildFile.replaceAll('\\', '/'), - manualProjectChildFile.replaceAll('\\', '/') - ]) - ) - expect(normalizedFilesToDelete).not.toContain( - promptSourceRootFile.replaceAll('\\', '/') - ) - expect(normalizedFilesToDelete).not.toContain( - promptSourceChildFile.replaceAll('\\', '/') - ) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) -}) - -describe('claudeCodeCLIOutputPlugin cleanup', () => { - it('keeps project-scope .claude cleanup directories registered', async () => { - const tempDir = fs.mkdtempSync( - path.join(os.tmpdir(), 'tnmsc-claude-cleanup-') - ) - const workspaceDir = path.join(tempDir, 'workspace') - const projectClaudeDirs = [ - path.join(workspaceDir, 'project-a', '.claude', 'rules'), - path.join(workspaceDir, 'project-a', '.claude', 'commands'), - path.join(workspaceDir, 'project-a', '.claude', 'agents'), - path.join(workspaceDir, 'project-a', '.claude', 'skills') - ] - - for (const directory of projectClaudeDirs) { - fs.mkdirSync(directory, {recursive: true}) - } - - try { - const result = await collectDeletionTargets( - [new ClaudeCodeCLIOutputPlugin()], - createCleanContext(workspaceDir) - ) - const normalizedDirsToDelete = result.dirsToDelete.map(target => - target.replaceAll('\\', '/')) - - expect(normalizedDirsToDelete).toEqual( - expect.arrayContaining( - projectClaudeDirs.map(target => target.replaceAll('\\', '/')) - ) - ) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/plugins/QoderIDEPluginOutputPlugin.test.ts b/cli/src/plugins/QoderIDEPluginOutputPlugin.test.ts deleted file mode 100644 index 9ab63746..00000000 --- a/cli/src/plugins/QoderIDEPluginOutputPlugin.test.ts +++ /dev/null @@ -1,396 +0,0 @@ -import type {CommandPrompt, GlobalMemoryPrompt, OutputWriteContext, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt, RulePrompt, SkillPrompt} from './plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {createLogger, FilePathKind, PromptKind} from './plugin-core' -import {QoderIDEPluginOutputPlugin} from './QoderIDEPluginOutputPlugin' - -class TestQoderIDEPluginOutputPlugin extends QoderIDEPluginOutputPlugin { - constructor(private readonly testHomeDir: string) { - super() - } - - protected override getHomeDir(): string { - return this.testHomeDir - } -} - -function createWorkspaceRootPrompt(): ProjectRootMemoryPrompt { - return { - type: PromptKind.ProjectRootMemory, - content: 'workspace root prompt', - length: 21, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Root, - path: '', - getDirectoryName: () => '' - }, - markdownContents: [] - } as ProjectRootMemoryPrompt -} - -function createProjectRootPrompt(content: string): ProjectRootMemoryPrompt { - return { - type: PromptKind.ProjectRootMemory, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Root, - path: '', - getDirectoryName: () => '' - }, - markdownContents: [] - } as ProjectRootMemoryPrompt -} - -function createChildPrompt(relativePath: string, content: string): ProjectChildrenMemoryPrompt { - return { - type: PromptKind.ProjectChildrenMemory, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - markdownContents: [], - dir: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: path.resolve('tmp/qoder-dist/app'), - getDirectoryName: () => path.basename(relativePath), - getAbsolutePath: () => path.resolve('tmp/qoder-dist/app', relativePath) - }, - workingChildDirectoryPath: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: path.resolve('tmp/qoder-workspace/project'), - getDirectoryName: () => path.basename(relativePath), - getAbsolutePath: () => path.resolve('tmp/qoder-workspace/project', relativePath) - } - } as ProjectChildrenMemoryPrompt -} - -function createGlobalMemoryPrompt(): GlobalMemoryPrompt { - return { - type: PromptKind.GlobalMemory, - content: 'global prompt', - length: 13, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'aindex/dist/global.mdx', - basePath: path.resolve('.'), - getDirectoryName: () => 'dist', - getAbsolutePath: () => path.resolve('aindex/dist/global.mdx') - }, - markdownContents: [] - } as GlobalMemoryPrompt -} - -function createCommandPrompt(): CommandPrompt { - return { - type: PromptKind.Command, - content: 'command body', - length: 12, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'commands/dev/build.mdx', - basePath: path.resolve('tmp/dist/commands'), - getDirectoryName: () => 'dev', - getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') - }, - commandPrefix: 'dev', - commandName: 'build', - yamlFrontMatter: { - description: 'Build command', - scope: 'project' - }, - markdownContents: [] - } as CommandPrompt -} - -function createSkillPrompt( - scope: 'project' | 'global' = 'project', - name: string = 'ship-it' -): SkillPrompt { - return { - type: PromptKind.Skill, - content: 'skill body', - length: 10, - filePathKind: FilePathKind.Relative, - skillName: name, - dir: { - pathKind: FilePathKind.Relative, - path: `skills/${name}`, - basePath: path.resolve('tmp/dist/skills'), - getDirectoryName: () => name, - getAbsolutePath: () => path.resolve('tmp/dist/skills', name) - }, - yamlFrontMatter: { - description: 'Skill description', - scope - }, - mcpConfig: { - type: PromptKind.SkillMcpConfig, - mcpServers: { - inspector: { - command: 'npx', - args: ['inspector'] - } - }, - rawContent: '{"mcpServers":{"inspector":{"command":"npx","args":["inspector"]}}}' - }, - markdownContents: [] - } as SkillPrompt -} - -function createRulePrompt(scope: 'project' | 'global' = 'project'): RulePrompt { - return { - type: PromptKind.Rule, - content: 'rule body', - length: 9, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'rules/ops/guard.mdx', - basePath: path.resolve('tmp/dist/rules'), - getDirectoryName: () => 'ops', - getAbsolutePath: () => path.resolve('tmp/dist/rules/ops/guard.mdx') - }, - prefix: 'ops', - ruleName: 'guard', - globs: ['src/**'], - scope, - markdownContents: [] - } as RulePrompt -} - -describe('qoderIDEPluginOutputPlugin synthetic workspace project output', () => { - it('writes workspace-root prompt, rules, commands, and skills through the synthetic workspace project', async () => { - const workspaceBase = path.resolve('tmp/qoder-workspace') - const plugin = new QoderIDEPluginOutputPlugin() - const ctx = { - logger: createLogger('QoderIDEPluginOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true, - rootMemoryPrompt: createWorkspaceRootPrompt() - }] - }, - commands: [createCommandPrompt()], - skills: [createSkillPrompt()], - rules: [createRulePrompt('project')] - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toContain(path.join(workspaceBase, '.qoder', 'rules', 'always.md')) - expect(paths).toContain(path.join(workspaceBase, '.qoder', 'rules', 'rule-ops-guard.md')) - expect(paths).toContain(path.join(workspaceBase, '.qoder', 'commands', 'dev-build.md')) - expect(paths).toContain(path.join(workspaceBase, '.qoder', 'skills', 'ship-it', 'SKILL.md')) - expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) - }) - - it('skips prompt-derived rules for the prompt-source project but still keeps real project rules', async () => { - const workspaceBase = path.resolve('tmp/qoder-prompt-source') - const plugin = new QoderIDEPluginOutputPlugin() - const ctx = { - logger: createLogger('QoderIDEPluginOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [ - { - name: 'aindex', - isPromptSourceProject: true, - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'aindex', - basePath: workspaceBase, - getDirectoryName: () => 'aindex', - getAbsolutePath: () => path.join(workspaceBase, 'aindex') - }, - rootMemoryPrompt: createProjectRootPrompt('prompt-source root'), - childMemoryPrompts: [createChildPrompt('commands', 'prompt-source child')] - }, - { - name: 'project-a', - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceBase, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceBase, 'project-a') - }, - rootMemoryPrompt: createProjectRootPrompt('project root'), - childMemoryPrompts: [createChildPrompt('commands', 'project child')] - } - ] - }, - globalMemory: createGlobalMemoryPrompt(), - rules: [createRulePrompt('project')] - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).not.toContain(path.join(workspaceBase, 'aindex', '.qoder', 'rules', 'global.md')) - expect(paths).not.toContain(path.join(workspaceBase, 'aindex', '.qoder', 'rules', 'always.md')) - expect(paths).not.toContain(path.join(workspaceBase, 'aindex', '.qoder', 'rules', 'glob-commands.md')) - expect(paths).toContain(path.join(workspaceBase, 'aindex', '.qoder', 'rules', 'rule-ops-guard.md')) - expect(paths).toContain(path.join(workspaceBase, 'project-a', '.qoder', 'rules', 'global.md')) - expect(paths).toContain(path.join(workspaceBase, 'project-a', '.qoder', 'rules', 'always.md')) - expect(paths).toContain(path.join(workspaceBase, 'project-a', '.qoder', 'rules', 'glob-commands.md')) - expect(paths).toContain(path.join(workspaceBase, 'project-a', '.qoder', 'rules', 'rule-ops-guard.md')) - }) - - it('keeps skill files global when only mcp is project-scoped', async () => { - const workspaceBase = path.resolve('tmp/qoder-split-scope-project-mcp') - const homeDir = path.join(workspaceBase, 'home') - const plugin = new TestQoderIDEPluginOutputPlugin(homeDir) - const ctx = { - logger: createLogger('QoderIDEPluginOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - pluginOptions: { - outputScopes: { - plugins: { - QoderIDEPluginOutputPlugin: { - skills: 'global', - mcp: 'project' - } - } - } - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true - }] - }, - skills: [ - createSkillPrompt('project', 'inspect-locally'), - createSkillPrompt('global', 'ship-it') - ] - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toContain(path.join(homeDir, '.qoder', 'skills', 'ship-it', 'SKILL.md')) - expect(paths).toContain(path.join(workspaceBase, '.qoder', 'skills', 'inspect-locally', 'mcp.json')) - expect(paths).not.toContain(path.join(workspaceBase, '.qoder', 'skills', 'ship-it', 'SKILL.md')) - expect(paths).not.toContain(path.join(homeDir, '.qoder', 'skills', 'inspect-locally', 'SKILL.md')) - }) - - it('keeps skill files project-scoped when only mcp is global-scoped', async () => { - const workspaceBase = path.resolve('tmp/qoder-split-scope-global-mcp') - const homeDir = path.join(workspaceBase, 'home') - const plugin = new TestQoderIDEPluginOutputPlugin(homeDir) - const ctx = { - logger: createLogger('QoderIDEPluginOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - pluginOptions: { - outputScopes: { - plugins: { - QoderIDEPluginOutputPlugin: { - skills: 'project', - mcp: 'global' - } - } - } - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true - }] - }, - skills: [ - createSkillPrompt('project', 'ship-it'), - createSkillPrompt('global', 'inspect-globally') - ] - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toContain(path.join(workspaceBase, '.qoder', 'skills', 'ship-it', 'SKILL.md')) - expect(paths).toContain(path.join(homeDir, '.qoder', 'skills', 'inspect-globally', 'mcp.json')) - expect(paths).not.toContain(path.join(homeDir, '.qoder', 'skills', 'ship-it', 'SKILL.md')) - expect(paths).not.toContain(path.join(workspaceBase, '.qoder', 'skills', 'inspect-globally', 'SKILL.md')) - }) - - it('writes the global prompt to workspace root through the synthetic workspace project', async () => { - const workspaceBase = path.resolve('tmp/qoder-workspace-global-prompt') - const plugin = new QoderIDEPluginOutputPlugin() - const ctx = { - logger: createLogger('QoderIDEPluginOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true - }] - }, - globalMemory: createGlobalMemoryPrompt() - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(workspaceBase, '.qoder', 'rules', 'global.md') - ) - }) -}) diff --git a/cli/src/plugins/QoderIDEPluginOutputPlugin.ts b/cli/src/plugins/QoderIDEPluginOutputPlugin.ts deleted file mode 100644 index 0ae4d6ad..00000000 --- a/cli/src/plugins/QoderIDEPluginOutputPlugin.ts +++ /dev/null @@ -1,419 +0,0 @@ -import type { - CommandPrompt, - OutputFileDeclaration, - OutputWriteContext, - ProjectChildrenMemoryPrompt, - RulePrompt, - RuleScope, - SkillPrompt -} from './plugin-core' -import {Buffer} from 'node:buffer' -import * as path from 'node:path' -import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterByProjectConfig} from './plugin-core' - -const QODER_CONFIG_DIR = '.qoder' -const RULES_SUBDIR = 'rules' -const COMMANDS_SUBDIR = 'commands' -const SKILLS_SUBDIR = 'skills' -const GLOBAL_RULE_FILE = 'global.md' -const PROJECT_RULE_FILE = 'always.md' -const CHILD_RULE_FILE_PREFIX = 'glob-' -const SKILL_FILE_NAME = 'SKILL.md' -const MCP_CONFIG_FILE = 'mcp.json' -const TRIGGER_ALWAYS = 'always_on' -const TRIGGER_GLOB = 'glob' -const RULE_GLOB_KEY = 'glob' -const RULE_FILE_PREFIX = 'rule-' - -type QoderOutputSource - = | {readonly kind: 'command', readonly command: CommandPrompt} - | {readonly kind: 'ruleContent', readonly content: string} - | {readonly kind: 'rulePrompt', readonly rule: RulePrompt} - | {readonly kind: 'skillMain', readonly skill: SkillPrompt} - | {readonly kind: 'skillMcpConfig', readonly rawContent: string} - | {readonly kind: 'skillChildDoc', readonly content: string} - | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} - | {readonly kind: 'ignoreFile', readonly content: string} - -function transformQoderCommandFrontMatter( - _cmd: CommandPrompt, - context: { - readonly sourceFrontMatter?: Record - } -): Record { - const source = context.sourceFrontMatter - - const frontMatter: Record = { - description: 'Fast command', - type: 'user_command' - } - - if (source?.['description'] != null) frontMatter['description'] = source['description'] - if (source?.['argumentHint'] != null) frontMatter['argumentHint'] = source['argumentHint'] - if (source?.['allowTools'] != null && Array.isArray(source['allowTools']) && source['allowTools'].length > 0) frontMatter['allowTools'] = source['allowTools'] - - return frontMatter -} - -export class QoderIDEPluginOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('QoderIDEPluginOutputPlugin', { - globalConfigDir: QODER_CONFIG_DIR, - treatWorkspaceRootProjectAsProject: true, - indexignore: '.qoderignore', - commands: { - subDir: COMMANDS_SUBDIR, - transformFrontMatter: transformQoderCommandFrontMatter - }, - skills: { - subDir: SKILLS_SUBDIR - }, - rules: { - subDir: RULES_SUBDIR, - sourceScopes: ['project', 'global'] - }, - cleanup: { - delete: { - project: { - dirs: ['.qoder/commands', '.qoder/rules', '.qoder/skills'] - }, - global: { - dirs: ['.qoder/commands', '.qoder/rules', '.qoder/skills'] - } - } - }, - capabilities: { - prompt: { - scopes: ['project', 'global'], - singleScope: false - }, - rules: { - scopes: ['project', 'global'], - singleScope: false - }, - commands: { - scopes: ['project', 'global'], - singleScope: true - }, - skills: { - scopes: ['project', 'global'], - singleScope: true - }, - mcp: { - scopes: ['project', 'global'], - singleScope: true - } - } - }) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {globalMemory, commands, skills, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext - const globalDir = this.getGlobalConfigDir() - const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - const activeRuleScopes = new Set(rules != null ? this.selectRuleScopes(ctx, rules) : []) - const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) - const promptProjects = this.getProjectPromptOutputProjects(ctx) - const selectedCommands = commands != null - ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, command => this.resolveCommandSourceScope(command), this.getTopicScopeOverride(ctx, 'commands')) - : {items: [] as readonly CommandPrompt[]} - const selectedSkills = skills != null - ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) - : {items: [] as readonly SkillPrompt[]} - const selectedMcpSkills = skills != null - ? this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'mcp') ?? this.getTopicScopeOverride(ctx, 'skills') - ) - : {items: [] as readonly SkillPrompt[]} - - const pushSkillDeclarations = ( - baseDir: string, - scope: 'project' | 'global', - filteredSkills: readonly SkillPrompt[] - ): void => { - for (const skill of filteredSkills) { - const skillName = this.getSkillName(skill) - const skillDir = path.join(baseDir, SKILLS_SUBDIR, skillName) - declarations.push({ - path: path.join(skillDir, SKILL_FILE_NAME), - scope, - source: {kind: 'skillMain', skill} satisfies QoderOutputSource - }) - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - declarations.push({ - path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), - scope, - source: { - kind: 'skillChildDoc', - content: childDoc.content as string - } satisfies QoderOutputSource - }) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - declarations.push({ - path: path.join(skillDir, resource.relativePath), - scope, - source: { - kind: 'skillResource', - content: resource.content, - encoding: resource.encoding - } satisfies QoderOutputSource - }) - } - } - } - } - - const pushSkillMcpDeclarations = ( - baseDir: string, - scope: 'project' | 'global', - filteredMcpSkills: readonly SkillPrompt[] - ): void => { - for (const skill of filteredMcpSkills) { - if (skill.mcpConfig == null) continue - - const skillDir = path.join(baseDir, SKILLS_SUBDIR, this.getSkillName(skill)) - declarations.push({ - path: path.join(skillDir, MCP_CONFIG_FILE), - scope, - source: { - kind: 'skillMcpConfig', - rawContent: skill.mcpConfig.rawContent - } satisfies QoderOutputSource - }) - } - } - - if (selectedCommands.selectedScope === 'project') { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectBase = this.resolveProjectConfigDir(ctx, project) - if (projectBase == null) continue - - const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') - for (const command of filteredCommands) { - declarations.push({ - path: path.join(projectBase, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), - scope: 'project', - source: {kind: 'command', command} satisfies QoderOutputSource - }) - } - } - } - - if (selectedCommands.selectedScope === 'global') { - const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') - for (const command of filteredCommands) { - declarations.push({ - path: path.join(globalDir, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), - scope: 'global', - source: {kind: 'command', command} satisfies QoderOutputSource - }) - } - } - - if (selectedSkills.selectedScope === 'project' || selectedMcpSkills.selectedScope === 'project') { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectBase = this.resolveProjectConfigDir(ctx, project) - if (projectBase == null) continue - - if (selectedSkills.selectedScope === 'project') { - const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') - pushSkillDeclarations(projectBase, 'project', filteredSkills) - } - - if (selectedMcpSkills.selectedScope === 'project') { - const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, project.projectConfig, 'skills') - pushSkillMcpDeclarations(projectBase, 'project', filteredMcpSkills) - } - } - } - - if (selectedSkills.selectedScope === 'global' || selectedMcpSkills.selectedScope === 'global') { - if (selectedSkills.selectedScope === 'global') { - const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') - pushSkillDeclarations(globalDir, 'global', filteredSkills) - } - - if (selectedMcpSkills.selectedScope === 'global') { - const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, promptSourceProjectConfig, 'skills') - pushSkillMcpDeclarations(globalDir, 'global', filteredMcpSkills) - } - } - - if (globalMemory != null && activePromptScopes.has('global')) { - for (const project of promptProjects) { - const projectBase = this.resolveProjectConfigDir(ctx, project) - if (projectBase == null) continue - declarations.push({ - path: path.join(projectBase, RULES_SUBDIR, GLOBAL_RULE_FILE), - scope: 'project', - source: { - kind: 'ruleContent', - content: this.buildAlwaysRuleContent(globalMemory.content as string, ctx) - } satisfies QoderOutputSource - }) - } - } - - if (activePromptScopes.has('project')) { - for (const project of promptProjects) { - const projectBase = this.resolveProjectConfigDir(ctx, project) - if (projectBase == null) continue - - if (project.rootMemoryPrompt != null) { - declarations.push({ - path: path.join(projectBase, RULES_SUBDIR, PROJECT_RULE_FILE), - scope: 'project', - source: { - kind: 'ruleContent', - content: this.buildAlwaysRuleContent(project.rootMemoryPrompt.content as string, ctx) - } satisfies QoderOutputSource - }) - } - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - declarations.push({ - path: path.join(projectBase, RULES_SUBDIR, this.buildChildRuleFileName(child)), - scope: 'project', - source: { - kind: 'ruleContent', - content: this.buildGlobRuleContent(child, ctx) - } satisfies QoderOutputSource - }) - } - } - } - } - - if (rules != null && rules.length > 0 && activeRuleScopes.has('project')) { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectBase = this.resolveProjectConfigDir(ctx, project) - if (projectBase == null) continue - - const projectRules = applySubSeriesGlobPrefix( - filterByProjectConfig(rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'project'), project.projectConfig, 'rules'), - project.projectConfig - ) - for (const rule of projectRules) { - declarations.push({ - path: path.join(projectBase, RULES_SUBDIR, this.buildRuleFileName(rule)), - scope: 'project', - source: {kind: 'rulePrompt', rule} satisfies QoderOutputSource - }) - } - } - } - - if (rules != null && rules.length > 0 && activeRuleScopes.has('global')) { - const globalRules = rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global') - for (const rule of globalRules) { - declarations.push({ - path: path.join(globalDir, RULES_SUBDIR, this.buildRuleFileName(rule)), - scope: 'global', - source: {kind: 'rulePrompt', rule} satisfies QoderOutputSource - }) - } - } - - const ignoreOutputPath = this.getIgnoreOutputPath() - const ignoreFile = this.indexignore == null - ? void 0 - : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) - if (ignoreOutputPath != null && ignoreFile != null) { - for (const project of this.getConcreteProjects(ctx)) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null || project.isPromptSourceProject === true) continue - declarations.push({ - path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), - scope: 'project', - source: { - kind: 'ignoreFile', - content: ignoreFile.content - } satisfies QoderOutputSource - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - const source = declaration.source as QoderOutputSource - switch (source.kind) { - case 'command': return this.buildCommandContent(source.command, ctx) - case 'ruleContent': return source.content - case 'rulePrompt': return this.buildRuleContent(source.rule, ctx) - case 'skillMain': { - const fmData = this.buildSkillFrontMatter(source.skill) - return this.buildMarkdownContent(source.skill.content as string, fmData, ctx) - } - case 'skillMcpConfig': return source.rawContent - case 'skillChildDoc': - case 'ignoreFile': return source.content - case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content - default: throw new Error(`Unsupported declaration source for ${this.name}`) - } - } - - private buildChildRuleFileName(child: ProjectChildrenMemoryPrompt): string { - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '').replaceAll('/', '-') - return `${CHILD_RULE_FILE_PREFIX}${normalized.length > 0 ? normalized : 'root'}.md` - } - - private buildAlwaysRuleContent(content: string, ctx: OutputWriteContext): string { - return this.buildMarkdownContent(content, {trigger: TRIGGER_ALWAYS, type: 'user_command'}, ctx) - } - - private buildGlobRuleContent(child: ProjectChildrenMemoryPrompt, ctx: OutputWriteContext): string { - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '') - const pattern = normalized.length === 0 ? '**/*' : `${normalized}/**` - return this.buildMarkdownContent(child.content as string, {trigger: TRIGGER_GLOB, [RULE_GLOB_KEY]: pattern, type: 'user_command'}, ctx) - } - - protected override buildSkillFrontMatter(skill: SkillPrompt): Record { - const fm = skill.yamlFrontMatter - return { - name: this.getSkillName(skill), - description: fm.description, - type: 'user_command', - ...fm.displayName != null && {displayName: fm.displayName}, - ...fm.keywords != null && fm.keywords.length > 0 && {keywords: fm.keywords}, - ...fm.author != null && {author: fm.author}, - ...fm.version != null && {version: fm.version}, - ...fm.allowTools != null && fm.allowTools.length > 0 && {allowTools: fm.allowTools} - } - } - - protected override buildRuleFileName(rule: RulePrompt, prefix: string = RULE_FILE_PREFIX): string { - return `${prefix}${rule.prefix}-${rule.ruleName}.md` - } - - protected override buildRuleContent(rule: RulePrompt, ctx?: OutputWriteContext): string { - const fmData: Record = { - trigger: TRIGGER_GLOB, - [RULE_GLOB_KEY]: rule.globs.length > 0 ? rule.globs.join(', ') : '**/*', - type: 'user_command' - } - return this.buildMarkdownContent(rule.content, fmData, ctx) - } - - protected override normalizeRuleScope(rule: RulePrompt): RuleScope { - return rule.scope ?? 'global' - } -} diff --git a/cli/src/plugins/ReadmeMdConfigFileOutputPlugin.ts b/cli/src/plugins/ReadmeMdConfigFileOutputPlugin.ts deleted file mode 100644 index 0ae873d9..00000000 --- a/cli/src/plugins/ReadmeMdConfigFileOutputPlugin.ts +++ /dev/null @@ -1,72 +0,0 @@ -import type { - OutputFileDeclaration, - OutputWriteContext, - ReadmeFileKind -} from './plugin-core' - -import * as path from 'node:path' -import {AbstractOutputPlugin, README_FILE_KIND_MAP} from './plugin-core' - -function resolveOutputFileName(fileKind?: ReadmeFileKind): string { - return README_FILE_KIND_MAP[fileKind ?? 'Readme'].out -} - -/** - * Output plugin for writing readme-family files to project directories. - * Reads README prompts collected by ReadmeMdInputCapability and writes them - * to the corresponding project directories. - * - * Output mapping: - * - fileKind=Readme → README.md - * - fileKind=CodeOfConduct → CODE_OF_CONDUCT.md - * - fileKind=Security → SECURITY.md - * - * Supports: - * - Root files (written to project root) - * - Child files (written to project subdirectories) - * - Dry-run mode (preview without writing) - * - Clean operation (delete generated files) - */ -export class ReadmeMdConfigFileOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('ReadmeMdConfigFileOutputPlugin', { - outputFileName: 'README.md', - cleanup: { - delete: { - project: { - files: ['README.md', 'CODE_OF_CONDUCT.md', 'SECURITY.md'] - } - } - }, - capabilities: {} - }) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {readmePrompts} = ctx.collectedOutputContext - if (readmePrompts == null || readmePrompts.length === 0) return declarations - - for (const readme of readmePrompts) { - const outputFileName = resolveOutputFileName(readme.fileKind) - const filePath = path.join(readme.targetDir.basePath, readme.targetDir.path, outputFileName) - declarations.push({ - path: filePath, - scope: 'project', - source: {content: readme.content as string} - }) - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - void ctx - const source = declaration.source as {content?: string} - if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) - return source.content - } -} diff --git a/cli/src/plugins/TraeCNIDEOutputPlugin.ts b/cli/src/plugins/TraeCNIDEOutputPlugin.ts deleted file mode 100644 index 751242c7..00000000 --- a/cli/src/plugins/TraeCNIDEOutputPlugin.ts +++ /dev/null @@ -1,60 +0,0 @@ -import type { - OutputFileDeclaration, - OutputWriteContext -} from './plugin-core' -import {AbstractOutputPlugin} from './plugin-core' - -const GLOBAL_MEMORY_FILE = 'GLOBAL.md' -const GLOBAL_CONFIG_DIR = '.trae-cn' -const USER_RULES_SUBDIR = 'user_rules' - -export class TraeCNIDEOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('TraeCNIDEOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: GLOBAL_MEMORY_FILE, - dependsOn: ['TraeIDEOutputPlugin'], - cleanup: { - delete: { - global: { - dirs: ['.trae-cn/user_rules'] - } - } - }, - capabilities: { - prompt: { - scopes: ['global'], - singleScope: false - } - } - }) - } - - private getGlobalUserRulesDir(): string { - return this.joinPath(this.getGlobalConfigDir(), USER_RULES_SUBDIR) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['global'])) - if (!activePromptScopes.has('global')) return [] - - const {globalMemory} = ctx.collectedOutputContext - if (globalMemory == null) return [] - - return [{ - path: this.joinPath(this.getGlobalUserRulesDir(), GLOBAL_MEMORY_FILE), - scope: 'global', - source: {content: globalMemory.content as string} - }] - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - void ctx - const source = declaration.source as {content?: string} - if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) - return source.content - } -} diff --git a/cli/src/plugins/TraeIDEOutputPlugin.test.ts b/cli/src/plugins/TraeIDEOutputPlugin.test.ts deleted file mode 100644 index 54835979..00000000 --- a/cli/src/plugins/TraeIDEOutputPlugin.test.ts +++ /dev/null @@ -1,125 +0,0 @@ -import type {OutputWriteContext, ProjectChildrenMemoryPrompt} from './plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {createLogger, FilePathKind, PromptKind} from './plugin-core' -import {TraeIDEOutputPlugin} from './TraeIDEOutputPlugin' - -function createChildPrompt(relativePath: string, content: string): ProjectChildrenMemoryPrompt { - return { - type: PromptKind.ProjectChildrenMemory, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - markdownContents: [], - dir: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: path.resolve('tmp/dist/app'), - getDirectoryName: () => path.basename(relativePath), - getAbsolutePath: () => path.resolve('tmp/dist/app', relativePath) - }, - workingChildDirectoryPath: { - pathKind: FilePathKind.Relative, - path: relativePath, - basePath: path.resolve('tmp/workspace/project'), - getDirectoryName: () => path.basename(relativePath), - getAbsolutePath: () => path.resolve('tmp/workspace/project', relativePath) - } - } as ProjectChildrenMemoryPrompt -} - -describe('traeIDEOutputPlugin steering rule output', () => { - it('emits project-relative glob and injects output-dir scope guard', async () => { - const plugin = new TraeIDEOutputPlugin() - const workspaceBase = path.resolve('tmp/trae-plugin-test') - const ctx = { - logger: createLogger('TraeIDEOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [ - { - name: 'project-a', - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceBase, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceBase, 'project-a') - }, - childMemoryPrompts: [createChildPrompt('commands', 'Rule body')] - } - ] - } - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const steering = declarations.find(d => d.source != null && (d.source as {kind?: string}).kind === 'steeringRule') - expect(steering).toBeDefined() - if (steering == null) throw new Error('Expected steering declaration') - - const {content} = steering.source as {content: string} - expect(content).toContain('globs: commands/**') - expect(content).toContain('Scope guard: this rule is for the project-root path "commands/" only.') - expect(content).toContain('Do not apply this rule to generated output paths such as "dist/commands/"') - expect(content).toContain('globs: commands/**\n---\n\nScope guard:') - }) - - it('honors frontMatter.blankLineAfter=false for prebuilt steering rule content', async () => { - const plugin = new TraeIDEOutputPlugin() - const workspaceBase = path.resolve('tmp/trae-plugin-test-no-blank-line') - const ctx = { - logger: createLogger('TraeIDEOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - pluginOptions: { - frontMatter: { - blankLineAfter: false - } - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [ - { - name: 'project-a', - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceBase, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceBase, 'project-a') - }, - childMemoryPrompts: [createChildPrompt('commands', 'Rule body')] - } - ] - } - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const steering = declarations.find(d => d.source != null && (d.source as {kind?: string}).kind === 'steeringRule') - expect(steering).toBeDefined() - if (steering == null) throw new Error('Expected steering declaration') - - const {content} = steering.source as {content: string} - expect(content).toContain('---\nScope guard:') - expect(content).not.toContain('---\n\nScope guard:') - }) -}) diff --git a/cli/src/plugins/TraeIDEOutputPlugin.ts b/cli/src/plugins/TraeIDEOutputPlugin.ts deleted file mode 100644 index 691cea41..00000000 --- a/cli/src/plugins/TraeIDEOutputPlugin.ts +++ /dev/null @@ -1,295 +0,0 @@ -import type { - CommandPrompt, - OutputFileDeclaration, - OutputWriteContext, - ProjectChildrenMemoryPrompt, - SkillPrompt -} from './plugin-core' -import {Buffer} from 'node:buffer' -import * as path from 'node:path' -import {AbstractOutputPlugin, filterByProjectConfig} from './plugin-core' - -const GLOBAL_MEMORY_FILE = 'GLOBAL.md' -const GLOBAL_CONFIG_DIR = '.trae' -const STEERING_SUBDIR = 'steering' -const RULES_SUBDIR = 'rules' -const COMMANDS_SUBDIR = 'commands' -const SKILLS_SUBDIR = 'skills' -const SKILL_FILE_NAME = 'SKILL.md' - -type TraeOutputSource - = | {readonly kind: 'globalMemory', readonly content: string} - | {readonly kind: 'steeringRule', readonly content: string} - | {readonly kind: 'command', readonly command: CommandPrompt} - | {readonly kind: 'skillMain', readonly skill: SkillPrompt} - | {readonly kind: 'skillChildDoc', readonly content: string} - | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} - | {readonly kind: 'ignoreFile', readonly content: string} - -export class TraeIDEOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('TraeIDEOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: GLOBAL_MEMORY_FILE, - treatWorkspaceRootProjectAsProject: true, - indexignore: '.traeignore', - commands: { - subDir: COMMANDS_SUBDIR, - transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} - }, - skills: { - subDir: SKILLS_SUBDIR - }, - cleanup: { - delete: { - project: { - dirs: ['.trae/rules', '.trae/commands', '.trae/skills'] - }, - global: { - dirs: ['.trae/steering', '.trae/commands', '.trae/skills'] - } - } - }, - capabilities: { - prompt: { - scopes: ['project', 'global'], - singleScope: false - }, - commands: { - scopes: ['project', 'global'], - singleScope: true - }, - skills: { - scopes: ['project', 'global'], - singleScope: true - } - } - }) - } - - protected override getIgnoreOutputPath(): string | undefined { - if (this.indexignore == null) return void 0 - return path.join('.trae', '.ignore') - } - - private getGlobalSteeringDir(): string { - return this.joinPath(this.getGlobalConfigDir(), STEERING_SUBDIR) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {commands, skills, globalMemory, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext - const concreteProjects = this.getConcreteProjects(ctx) - const promptProjects = this.getProjectPromptOutputProjects(ctx) - const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) - const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) - const selectedCommands = commands != null - ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, command => this.resolveCommandSourceScope(command), this.getTopicScopeOverride(ctx, 'commands')) - : {items: [] as readonly CommandPrompt[]} - const selectedSkills = skills != null - ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) - : {items: [] as readonly SkillPrompt[]} - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - - if (globalMemory != null && activePromptScopes.has('global')) { - declarations.push({ - path: this.joinPath(this.getGlobalSteeringDir(), GLOBAL_MEMORY_FILE), - scope: 'global', - source: { - kind: 'globalMemory', - content: globalMemory.content as string - } satisfies TraeOutputSource - }) - } - - for (const project of promptProjects) { - const projectBase = this.resolveProjectRootDir(ctx, project) - if (projectBase == null) continue - - if (project.childMemoryPrompts != null && activePromptScopes.has('project')) { - for (const child of project.childMemoryPrompts) { - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const normalizedChildPath = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '') - const globPattern = this.buildProjectRelativeGlobPattern(normalizedChildPath) - const steeringContent = this.buildMarkdownContent( - [ - this.buildPathGuardHint(normalizedChildPath), - '', - child.content as string - ].join('\n'), - {alwaysApply: false, globs: globPattern}, - ctx - ) - - declarations.push({ - path: path.join(projectBase, GLOBAL_CONFIG_DIR, RULES_SUBDIR, this.buildSteeringFileName(child)), - scope: 'project', - source: { - kind: 'steeringRule', - content: steeringContent - } satisfies TraeOutputSource - }) - } - } - } - - if (selectedCommands.selectedScope === 'project') { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectBase = this.resolveProjectConfigDir(ctx, project) - if (projectBase == null) continue - - const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') - for (const command of filteredCommands) { - declarations.push({ - path: path.join(projectBase, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), - scope: 'project', - source: {kind: 'command', command} satisfies TraeOutputSource - }) - } - } - } - - if (selectedCommands.selectedScope === 'global') { - const baseDir = this.getGlobalConfigDir() - const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') - for (const command of filteredCommands) { - declarations.push({ - path: path.join(baseDir, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), - scope: 'global', - source: {kind: 'command', command} satisfies TraeOutputSource - }) - } - } - - const pushSkillDeclarations = ( - baseDir: string, - scope: 'project' | 'global', - filteredSkills: readonly SkillPrompt[] - ): void => { - for (const skill of filteredSkills) { - const skillName = this.getSkillName(skill) - const skillDir = path.join(baseDir, SKILLS_SUBDIR, skillName) - declarations.push({ - path: path.join(skillDir, SKILL_FILE_NAME), - scope, - source: {kind: 'skillMain', skill} satisfies TraeOutputSource - }) - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - declarations.push({ - path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), - scope, - source: { - kind: 'skillChildDoc', - content: childDoc.content as string - } satisfies TraeOutputSource - }) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - declarations.push({ - path: path.join(skillDir, resource.relativePath), - scope, - source: { - kind: 'skillResource', - content: resource.content, - encoding: resource.encoding - } satisfies TraeOutputSource - }) - } - } - } - } - - if (selectedSkills.selectedScope === 'project') { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectBase = this.resolveProjectConfigDir(ctx, project) - if (projectBase == null) continue - const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') - pushSkillDeclarations(projectBase, 'project', filteredSkills) - } - } - - if (selectedSkills.selectedScope === 'global') { - const baseDir = this.getGlobalConfigDir() - const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') - pushSkillDeclarations(baseDir, 'global', filteredSkills) - } - - const ignoreOutputPath = this.getIgnoreOutputPath() - const ignoreFile = this.indexignore == null - ? void 0 - : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) - if (ignoreOutputPath != null && ignoreFile != null) { - for (const project of concreteProjects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null || project.isPromptSourceProject === true) continue - declarations.push({ - path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), - scope: 'project', - source: { - kind: 'ignoreFile', - content: ignoreFile.content - } satisfies TraeOutputSource - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - const source = declaration.source as TraeOutputSource - switch (source.kind) { - case 'globalMemory': - case 'steeringRule': - case 'skillChildDoc': - case 'ignoreFile': return source.content - case 'command': return this.buildCommandContent(source.command, ctx) - case 'skillMain': { - const frontMatterData = this.buildSkillFrontMatter(source.skill) - return this.buildMarkdownContent(source.skill.content as string, frontMatterData, ctx) - } - case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content - default: throw new Error(`Unsupported declaration source for ${this.name}`) - } - } - - protected override buildSkillFrontMatter(skill: SkillPrompt): Record { - const fm: Record = { - description: skill.yamlFrontMatter.description ?? '' - } - - if (skill.yamlFrontMatter.displayName != null) fm['name'] = skill.yamlFrontMatter.displayName - - return fm - } - - private buildSteeringFileName(child: ProjectChildrenMemoryPrompt): string { - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '').replaceAll('/', '-') - return `trae-${normalized}.md` - } - - private buildPathGuardHint(normalizedChildPath: string): string { - if (normalizedChildPath.length === 0) { - return 'Scope guard: apply this rule to project source files only; do not apply to generated output directories (for example dist/, build/, out/, .next/, target/).' - } - - return [ - `Scope guard: this rule is for the project-root path "${normalizedChildPath}/" only.`, - `Do not apply this rule to generated output paths such as "dist/${normalizedChildPath}/", "build/${normalizedChildPath}/", "out/${normalizedChildPath}/", ".next/${normalizedChildPath}/", or "target/${normalizedChildPath}/".` - ].join('\n') - } - - private buildProjectRelativeGlobPattern(normalizedChildPath: string): string { - if (normalizedChildPath.length === 0) return '**/*' - return `${normalizedChildPath}/**` - } -} diff --git a/cli/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts b/cli/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts deleted file mode 100644 index d65290d9..00000000 --- a/cli/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts +++ /dev/null @@ -1,65 +0,0 @@ -import type { - OutputFileDeclaration, - OutputWriteContext -} from './plugin-core' -import {AbstractOutputPlugin, IDEKind} from './plugin-core' - -const VSCODE_DIR = '.vscode' - -export class VisualStudioCodeIDEConfigOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('VisualStudioCodeIDEConfigOutputPlugin', { - cleanup: { - delete: { - project: { - files: ['.vscode/settings.json', '.vscode/extensions.json'] - } - } - }, - capabilities: {} - }) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {projects} = ctx.collectedOutputContext.workspace - const {vscodeConfigFiles} = ctx.collectedOutputContext - const vscodeConfigs = vscodeConfigFiles ?? [] - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - for (const config of vscodeConfigs) { - const targetRelativePath = this.getTargetRelativePath(config) - declarations.push({ - path: this.resolvePath(projectDir.basePath, projectDir.path, targetRelativePath), - scope: 'project', - source: {content: config.content} - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - void ctx - const source = declaration.source as {content?: string} - if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) - return source.content - } - - private getTargetRelativePath(config: {type: IDEKind, dir: {path: string}}): string { - const sourcePath = config.dir.path - - if (config.type !== IDEKind.VSCode) return this.basename(sourcePath) - - const vscodeIndex = sourcePath.indexOf(VSCODE_DIR) - if (vscodeIndex !== -1) return sourcePath.slice(Math.max(0, vscodeIndex)) - return this.joinPath(VSCODE_DIR, this.basename(sourcePath)) - } -} diff --git a/cli/src/plugins/WarpIDEOutputPlugin.test.ts b/cli/src/plugins/WarpIDEOutputPlugin.test.ts deleted file mode 100644 index cfd2b31e..00000000 --- a/cli/src/plugins/WarpIDEOutputPlugin.test.ts +++ /dev/null @@ -1,75 +0,0 @@ -import type {GlobalMemoryPrompt, OutputWriteContext, ProjectRootMemoryPrompt} from './plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {createLogger, FilePathKind, PromptKind} from './plugin-core' -import {WarpIDEOutputPlugin} from './WarpIDEOutputPlugin' - -function createGlobalMemoryPrompt(): GlobalMemoryPrompt { - return { - type: PromptKind.GlobalMemory, - content: 'global prompt', - length: 13, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'aindex/dist/global.mdx', - basePath: path.resolve('.'), - getDirectoryName: () => 'dist', - getAbsolutePath: () => path.resolve('aindex/dist/global.mdx') - }, - markdownContents: [] - } as GlobalMemoryPrompt -} - -function createWorkspaceRootPrompt(): ProjectRootMemoryPrompt { - return { - type: PromptKind.ProjectRootMemory, - content: 'workspace prompt', - length: 16, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Root, - path: '', - getDirectoryName: () => '' - }, - markdownContents: [] - } as ProjectRootMemoryPrompt -} - -describe('warpIDEOutputPlugin workspace prompt support', () => { - it('writes the synthetic workspace root prompt to workspaceDir/WARP.md', async () => { - const workspaceBase = path.resolve('tmp/warp-workspace') - const plugin = new WarpIDEOutputPlugin() - const ctx = { - logger: createLogger('WarpIDEOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true, - rootMemoryPrompt: createWorkspaceRootPrompt() - }] - }, - globalMemory: createGlobalMemoryPrompt() - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(ctx) - const workspaceDeclaration = declarations.find(declaration => declaration.path === path.join(workspaceBase, 'WARP.md')) - - expect(workspaceDeclaration?.path).toBe(path.join(workspaceBase, 'WARP.md')) - expect(workspaceDeclaration?.scope).toBe('project') - expect((workspaceDeclaration?.source as {content?: string} | undefined)?.content).toContain('global prompt') - expect((workspaceDeclaration?.source as {content?: string} | undefined)?.content).toContain('workspace prompt') - }) -}) diff --git a/cli/src/plugins/WarpIDEOutputPlugin.ts b/cli/src/plugins/WarpIDEOutputPlugin.ts deleted file mode 100644 index 6168955b..00000000 --- a/cli/src/plugins/WarpIDEOutputPlugin.ts +++ /dev/null @@ -1,110 +0,0 @@ -import type { - OutputFileDeclaration, - OutputWriteContext -} from './plugin-core' -import * as path from 'node:path' -import {AbstractOutputPlugin, PLUGIN_NAMES} from './plugin-core' - -const PROJECT_MEMORY_FILE = 'WARP.md' - -export class WarpIDEOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('WarpIDEOutputPlugin', { - outputFileName: PROJECT_MEMORY_FILE, - treatWorkspaceRootProjectAsProject: true, - indexignore: '.warpindexignore', - cleanup: { - delete: { - project: { - files: [PROJECT_MEMORY_FILE] - } - } - }, - capabilities: { - prompt: { - scopes: ['project', 'global'], - singleScope: false - } - } - }) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {globalMemory, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext - const projects = this.getConcreteProjects(ctx) - const promptProjects = this.getProjectPromptOutputProjects(ctx) - const agentsRegistered = this.shouldSkipDueToPlugin(ctx, PLUGIN_NAMES.AgentsOutput) - const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) - const globalMemoryContent = this.extractGlobalMemoryContent(ctx) - - if (agentsRegistered) { - if (globalMemory != null && activePromptScopes.has('global')) { - for (const project of promptProjects) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) continue - declarations.push({ - path: path.join(projectRootDir, PROJECT_MEMORY_FILE), - scope: 'project', - source: {content: globalMemory.content as string} - }) - } - } - } else { - for (const project of promptProjects) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) continue - - if (project.rootMemoryPrompt != null && activePromptScopes.has('project')) { - const combinedContent = this.combineGlobalWithContent( - globalMemoryContent, - project.rootMemoryPrompt.content as string - ) - declarations.push({ - path: path.join(projectRootDir, PROJECT_MEMORY_FILE), - scope: 'project', - source: {content: combinedContent} - }) - } - - if (project.childMemoryPrompts != null && activePromptScopes.has('project')) { - for (const child of project.childMemoryPrompts) { - declarations.push({ - path: this.resolveFullPath(child.dir), - scope: 'project', - source: {content: child.content as string} - }) - } - } - } - } - - const ignoreOutputPath = this.getIgnoreOutputPath() - const ignoreFile = this.indexignore == null - ? void 0 - : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) - if (ignoreOutputPath != null && ignoreFile != null) { - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null || project.isPromptSourceProject === true) continue - declarations.push({ - path: this.resolvePath(projectDir.basePath, projectDir.path, ignoreOutputPath), - scope: 'project', - source: {content: ignoreFile.content} - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - void ctx - const source = declaration.source as {content?: string} - if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) - return source.content - } -} diff --git a/cli/src/plugins/WindsurfOutputPlugin.test.ts b/cli/src/plugins/WindsurfOutputPlugin.test.ts deleted file mode 100644 index dbe7f76d..00000000 --- a/cli/src/plugins/WindsurfOutputPlugin.test.ts +++ /dev/null @@ -1,212 +0,0 @@ -import type {CommandPrompt, OutputScopeSelection, OutputWriteContext, Project, RulePrompt, SkillPrompt} from './plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {createLogger, FilePathKind, PromptKind} from './plugin-core' -import {WindsurfOutputPlugin} from './WindsurfOutputPlugin' - -function createCommandPrompt(scope: 'project' | 'global', seriName: string): CommandPrompt { - return { - type: PromptKind.Command, - content: 'command content', - length: 15, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'dev/build.mdx', - basePath: path.resolve('tmp/dist/commands'), - getDirectoryName: () => 'dev', - getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') - }, - commandPrefix: 'dev', - commandName: 'build', - seriName, - yamlFrontMatter: { - namingCase: 'kebabCase', - description: 'Build command', - scope - }, - markdownContents: [] - } as CommandPrompt -} - -function createSkillPrompt(scope: 'project' | 'global', seriName: string): SkillPrompt { - return { - type: PromptKind.Skill, - content: 'skill content', - length: 13, - filePathKind: FilePathKind.Relative, - skillName: 'ship-it', - dir: { - pathKind: FilePathKind.Relative, - path: 'skills/ship-it', - basePath: path.resolve('tmp/dist/skills'), - getDirectoryName: () => 'ship-it', - getAbsolutePath: () => path.resolve('tmp/dist/skills/ship-it') - }, - seriName, - yamlFrontMatter: { - namingCase: 'kebabCase', - description: 'Ship release', - scope - }, - markdownContents: [] - } as SkillPrompt -} - -function createRulePrompt(scope: 'project' | 'global'): RulePrompt { - return { - type: PromptKind.Rule, - content: 'rule body', - length: 9, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'rules/ops/guard.mdx', - basePath: path.resolve('tmp/dist/rules'), - getDirectoryName: () => 'ops', - getAbsolutePath: () => path.resolve('tmp/dist/rules/ops/guard.mdx') - }, - prefix: 'ops', - ruleName: 'guard', - globs: ['src/**'], - scope, - markdownContents: [] - } as RulePrompt -} - -function createProject(workspaceBase: string, name: string, includeSeries: readonly string[], promptSource = false): Project { - return { - name, - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: name, - basePath: workspaceBase, - getDirectoryName: () => name, - getAbsolutePath: () => path.join(workspaceBase, name) - }, - isPromptSourceProject: promptSource, - projectConfig: { - includeSeries: [...includeSeries] - } - } as Project -} - -function createWorkspaceRootProject(): Project { - return { - name: '__workspace__', - isWorkspaceRootProject: true - } as Project -} - -function createWriteContext( - workspaceBase: string, - projects: readonly Project[], - commands: readonly CommandPrompt[], - skills: readonly SkillPrompt[], - scopeOverrides: { - readonly commands: OutputScopeSelection - readonly skills: OutputScopeSelection - } -): OutputWriteContext { - return { - logger: createLogger('WindsurfOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - pluginOptions: { - outputScopes: { - plugins: { - WindsurfOutputPlugin: scopeOverrides - } - } - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [...projects] - }, - commands, - skills - } - } as OutputWriteContext -} - -describe('windsurfOutputPlugin synthetic workspace project output', () => { - it('writes workflows and skills to each real project when project scope is selected', async () => { - const workspaceBase = path.resolve('tmp/windsurf-project-scope') - const plugin = new WindsurfOutputPlugin() - const context = createWriteContext( - workspaceBase, - [ - createProject(workspaceBase, 'alpha-project', ['alpha'], true), - createProject(workspaceBase, 'beta-project', ['beta']) - ], - [createCommandPrompt('project', 'alpha')], - [createSkillPrompt('project', 'alpha')], - {commands: 'project', skills: 'project'} - ) - - const declarations = await plugin.declareOutputFiles(context) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toContain(path.join(workspaceBase, 'alpha-project', '.windsurf', 'workflows', 'dev-build.md')) - expect(paths).toContain(path.join(workspaceBase, 'alpha-project', '.windsurf', 'skills', 'ship-it', 'SKILL.md')) - expect(paths.some(outputPath => outputPath.includes(path.join('beta-project', '.windsurf')))).toBe(false) - expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) - }) - - it('writes project-scoped workflows and skills into workspace root via the synthetic workspace project', async () => { - const workspaceBase = path.resolve('tmp/windsurf-workspace-project') - const plugin = new WindsurfOutputPlugin() - const context = createWriteContext( - workspaceBase, - [createWorkspaceRootProject()], - [createCommandPrompt('project', 'alpha')], - [createSkillPrompt('project', 'alpha')], - {commands: 'project', skills: 'project'} - ) - - const declarations = await plugin.declareOutputFiles(context) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toContain(path.join(workspaceBase, '.windsurf', 'workflows', 'dev-build.md')) - expect(paths).toContain(path.join(workspaceBase, '.windsurf', 'skills', 'ship-it', 'SKILL.md')) - expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) - }) - - it('writes project-scoped rules into workspace-root .windsurf/rules via the synthetic workspace project', async () => { - const workspaceBase = path.resolve('tmp/windsurf-workspace-rules') - const plugin = new WindsurfOutputPlugin() - const context = { - logger: createLogger('WindsurfOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [createWorkspaceRootProject()] - }, - rules: [createRulePrompt('project')] - } - } as OutputWriteContext - - const declarations = await plugin.declareOutputFiles(context) - - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(workspaceBase, '.windsurf', 'rules', 'rule-ops-guard.md') - ) - expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) - }) -}) diff --git a/cli/src/plugins/WindsurfOutputPlugin.ts b/cli/src/plugins/WindsurfOutputPlugin.ts deleted file mode 100644 index d18a0795..00000000 --- a/cli/src/plugins/WindsurfOutputPlugin.ts +++ /dev/null @@ -1,278 +0,0 @@ -import type {CommandPrompt, OutputFileDeclaration, OutputWriteContext, RulePrompt, SkillPrompt} from './plugin-core' -import {Buffer} from 'node:buffer' -import * as path from 'node:path' -import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterByProjectConfig, PLUGIN_NAMES} from './plugin-core' - -const CODEIUM_WINDSURF_DIR = '.codeium/windsurf' -const WORKFLOWS_SUBDIR = 'global_workflows' -const PROJECT_WORKFLOWS_SUBDIR = 'workflows' -const MEMORIES_SUBDIR = 'memories' -const GLOBAL_MEMORY_FILE = 'global_rules.md' -const SKILLS_SUBDIR = 'skills' -const SKILL_FILE_NAME = 'SKILL.md' -const WINDSURF_RULES_DIR = '.windsurf' -const WINDSURF_RULES_SUBDIR = 'rules' -const RULE_FILE_PREFIX = 'rule-' - -type WindsurfOutputSource - = | {readonly kind: 'globalMemory', readonly content: string} - | {readonly kind: 'command', readonly command: CommandPrompt} - | {readonly kind: 'skillMain', readonly skill: SkillPrompt} - | {readonly kind: 'skillChildDoc', readonly content: string} - | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} - | {readonly kind: 'rule', readonly rule: RulePrompt} - | {readonly kind: 'ignoreFile', readonly content: string} - -export class WindsurfOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('WindsurfOutputPlugin', { - globalConfigDir: CODEIUM_WINDSURF_DIR, - outputFileName: '', - treatWorkspaceRootProjectAsProject: true, - dependsOn: [PLUGIN_NAMES.AgentsOutput], - indexignore: '.codeiumignore', - commands: { - subDir: WORKFLOWS_SUBDIR, - transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} - }, - skills: { - subDir: SKILLS_SUBDIR - }, - rules: { - sourceScopes: ['project', 'global'] - }, - cleanup: { - delete: { - project: { - dirs: ['.windsurf/rules', '.windsurf/workflows', '.windsurf/global_workflows', '.windsurf/skills', '.codeium/windsurf/global_workflows', '.codeium/windsurf/skills'] - }, - global: { - dirs: ['.codeium/windsurf/global_workflows', '.codeium/windsurf/memories', '.codeium/windsurf/skills'] - } - } - }, - capabilities: { - prompt: { - scopes: ['global'], - singleScope: false - }, - rules: { - scopes: ['project', 'global'], - singleScope: false - }, - commands: { - scopes: ['project', 'global'], - singleScope: true - }, - skills: { - scopes: ['project', 'global'], - singleScope: true - } - } - }) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {commands, skills, globalMemory, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext - const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) - const globalBase = this.getCodeiumWindsurfDir() - const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['global'])) - const selectedCommands = commands != null - ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, command => this.resolveCommandSourceScope(command), this.getTopicScopeOverride(ctx, 'commands')) - : {items: [] as readonly CommandPrompt[]} - const selectedSkills = skills != null - ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) - : {items: [] as readonly SkillPrompt[]} - const concreteProjects = this.getConcreteProjects(ctx) - - if (globalMemory != null && activePromptScopes.has('global')) { - declarations.push({ - path: path.join(globalBase, MEMORIES_SUBDIR, GLOBAL_MEMORY_FILE), - scope: 'global', - source: { - kind: 'globalMemory', - content: globalMemory.content as string - } satisfies WindsurfOutputSource - }) - } - - const pushSkillDeclarations = ( - basePath: string, - scope: 'project' | 'global', - skill: SkillPrompt - ): void => { - const skillName = this.getSkillName(skill) - const skillDir = path.join(basePath, SKILLS_SUBDIR, skillName) - declarations.push({ - path: path.join(skillDir, SKILL_FILE_NAME), - scope, - source: {kind: 'skillMain', skill} satisfies WindsurfOutputSource - }) - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - declarations.push({ - path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), - scope, - source: { - kind: 'skillChildDoc', - content: childDoc.content as string - } satisfies WindsurfOutputSource - }) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - declarations.push({ - path: path.join(skillDir, resource.relativePath), - scope, - source: { - kind: 'skillResource', - content: resource.content, - encoding: resource.encoding - } satisfies WindsurfOutputSource - }) - } - } - } - - if (selectedSkills.selectedScope === 'project') { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - const projectBase = projectRootDir == null ? void 0 : path.join(projectRootDir, WINDSURF_RULES_DIR) - if (projectBase == null) continue - const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') - for (const skill of filteredSkills) pushSkillDeclarations(projectBase, 'project', skill) - } - } - - if (selectedSkills.selectedScope === 'global') { - const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') - for (const skill of filteredSkills) pushSkillDeclarations(globalBase, 'global', skill) - } - - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - if (selectedCommands.selectedScope === 'project') { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - const projectBase = projectRootDir == null ? void 0 : path.join(projectRootDir, WINDSURF_RULES_DIR) - if (projectBase == null) continue - const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') - for (const command of filteredCommands) { - declarations.push({ - path: path.join(projectBase, PROJECT_WORKFLOWS_SUBDIR, this.transformCommandName(command, transformOptions)), - scope: 'project', - source: {kind: 'command', command} satisfies WindsurfOutputSource - }) - } - } - } - - if (selectedCommands.selectedScope === 'global') { - const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') - for (const command of filteredCommands) { - declarations.push({ - path: path.join(globalBase, WORKFLOWS_SUBDIR, this.transformCommandName(command, transformOptions)), - scope: 'global', - source: {kind: 'command', command} satisfies WindsurfOutputSource - }) - } - } - - if (rules != null && rules.length > 0) { - const activeRuleScopes = new Set(this.selectRuleScopes(ctx, rules)) - const globalRules = rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global') - if (activeRuleScopes.has('global')) { - for (const rule of globalRules) { - declarations.push({ - path: path.join(globalBase, MEMORIES_SUBDIR, this.buildRuleFileName(rule)), - scope: 'global', - source: {kind: 'rule', rule} satisfies WindsurfOutputSource - }) - } - } - - if (activeRuleScopes.has('project')) { - for (const project of this.getProjectOutputProjects(ctx)) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) continue - - const projectRules = applySubSeriesGlobPrefix( - filterByProjectConfig(rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'project'), project.projectConfig, 'rules'), - project.projectConfig - ) - for (const rule of projectRules) { - declarations.push({ - path: path.join(projectRootDir, WINDSURF_RULES_DIR, WINDSURF_RULES_SUBDIR, this.buildRuleFileName(rule)), - scope: 'project', - source: {kind: 'rule', rule} satisfies WindsurfOutputSource - }) - } - } - } - } - - const ignoreOutputPath = this.getIgnoreOutputPath() - const ignoreFile = this.indexignore == null - ? void 0 - : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) - if (ignoreOutputPath != null && ignoreFile != null) { - for (const project of concreteProjects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null || project.isPromptSourceProject === true) continue - declarations.push({ - path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), - scope: 'project', - source: { - kind: 'ignoreFile', - content: ignoreFile.content - } satisfies WindsurfOutputSource - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - const source = declaration.source as WindsurfOutputSource - switch (source.kind) { - case 'globalMemory': - case 'skillChildDoc': - case 'ignoreFile': return source.content - case 'command': return this.buildCommandContent(source.command, ctx) - case 'skillMain': { - const frontMatterData = this.buildSkillFrontMatter(source.skill) - return this.buildMarkdownContent(source.skill.content as string, frontMatterData, ctx) - } - case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content - case 'rule': return this.buildRuleContent(source.rule, ctx) - default: throw new Error(`Unsupported declaration source for ${this.name}`) - } - } - - private getCodeiumWindsurfDir(): string { return path.join(this.getHomeDir(), CODEIUM_WINDSURF_DIR) } - - protected override buildRuleFileName(rule: RulePrompt, prefix: string = RULE_FILE_PREFIX): string { - return `${prefix}${rule.prefix}-${rule.ruleName}.md` - } - - protected override buildRuleContent(rule: RulePrompt, ctx?: OutputWriteContext): string { - const fmData: Record = {trigger: 'glob', globs: rule.globs.length > 0 ? rule.globs.join(', ') : ''} - const raw = this.buildMarkdownContent(rule.content, fmData, ctx) - const lines = raw.split('\n') - return lines.map(line => { - const match = /^(\s*globs:\s*)(['"])(.*)\2\s*$/.exec(line) - if (match == null) return line - const prefix = match[1] ?? 'globs: ' - const value = match[3] ?? '' - if (value.trim().length === 0) return line - return `${prefix}${value}` - }).join('\n') - } -} diff --git a/cli/src/plugins/WslMirrorDeclarations.test.ts b/cli/src/plugins/WslMirrorDeclarations.test.ts deleted file mode 100644 index 69f48e58..00000000 --- a/cli/src/plugins/WslMirrorDeclarations.test.ts +++ /dev/null @@ -1,25 +0,0 @@ -import {describe, expect, it} from 'vitest' -import {ClaudeCodeCLIOutputPlugin} from './ClaudeCodeCLIOutputPlugin' -import {CodexCLIOutputPlugin} from './CodexCLIOutputPlugin' - -describe('wSL mirror declarations', () => { - it('declares the expected Claude host config files', async () => { - const plugin = new ClaudeCodeCLIOutputPlugin() - const declarations = await plugin.declareWslMirrorFiles?.({} as never) - - expect(declarations).toEqual([ - {sourcePath: '~/.claude/settings.json'}, - {sourcePath: '~/.claude/config.json'} - ]) - }) - - it('declares the expected Codex host config files', async () => { - const plugin = new CodexCLIOutputPlugin() - const declarations = await plugin.declareWslMirrorFiles?.({} as never) - - expect(declarations).toEqual([ - {sourcePath: '~/.codex/config.toml'}, - {sourcePath: '~/.codex/auth.json'} - ]) - }) -}) diff --git a/cli/src/plugins/ZedIDEConfigOutputPlugin.ts b/cli/src/plugins/ZedIDEConfigOutputPlugin.ts deleted file mode 100644 index 2da8d739..00000000 --- a/cli/src/plugins/ZedIDEConfigOutputPlugin.ts +++ /dev/null @@ -1,64 +0,0 @@ -import type { - OutputFileDeclaration, - OutputWriteContext -} from './plugin-core' -import {AbstractOutputPlugin, IDEKind} from './plugin-core' - -const ZED_DIR = '.zed' - -export class ZedIDEConfigOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('ZedIDEConfigOutputPlugin', { - cleanup: { - delete: { - project: { - files: ['.zed/settings.json'] - } - } - }, - capabilities: {} - }) - } - - override async declareOutputFiles(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {projects} = ctx.collectedOutputContext.workspace - const zedConfigs = ctx.collectedOutputContext.zedConfigFiles ?? [] - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - for (const config of zedConfigs) { - const targetRelativePath = this.getTargetRelativePath(config) - declarations.push({ - path: this.resolvePath(projectDir.basePath, projectDir.path, targetRelativePath), - scope: 'project', - source: {content: config.content} - }) - } - } - - return declarations - } - - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { - void ctx - const source = declaration.source as {content?: string} - if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) - return source.content - } - - private getTargetRelativePath(config: {type: IDEKind, dir: {path: string}}): string { - const sourcePath = config.dir.path - - if (config.type !== IDEKind.Zed) return this.basename(sourcePath) - - const zedIndex = sourcePath.indexOf(ZED_DIR) - if (zedIndex !== -1) return sourcePath.slice(Math.max(0, zedIndex)) - return this.joinPath(ZED_DIR, 'settings.json') - } -} diff --git a/cli/src/plugins/desk-paths.test.ts b/cli/src/plugins/desk-paths.test.ts deleted file mode 100644 index a266f9e5..00000000 --- a/cli/src/plugins/desk-paths.test.ts +++ /dev/null @@ -1,141 +0,0 @@ -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import {afterEach, describe, expect, it, vi} from 'vitest' - -import {deleteEmptyDirectories, deleteFiles, deleteTargets, getPlatformFixedDir} from '../core/desk-paths' - -const defaultNativeBinding = globalThis.__TNMSC_TEST_NATIVE_BINDING__ - -describe('desk paths', () => { - afterEach(() => { - vi.restoreAllMocks() - vi.clearAllMocks() - globalThis.__TNMSC_TEST_NATIVE_BINDING__ = defaultNativeBinding - }) - - it('delegates getPlatformFixedDir to the native binding', () => { - const getPlatformFixedDirMock = vi.fn(() => '/tmp/native-fixed-dir') - globalThis.__TNMSC_TEST_NATIVE_BINDING__ = { - ...defaultNativeBinding, - getPlatformFixedDir: getPlatformFixedDirMock - } - - expect(getPlatformFixedDir()).toBe('/tmp/native-fixed-dir') - expect(getPlatformFixedDirMock).toHaveBeenCalledOnce() - }) - - it('throws when the native desk-paths binding is unavailable', () => { - globalThis.__TNMSC_TEST_NATIVE_BINDING__ = void 0 - - expect(() => getPlatformFixedDir()).toThrow('Native desk-paths binding is required') - }) - - it('deletes mixed file and directory targets in one batch', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-desk-paths-delete-targets-')) - const outputFile = path.join(tempDir, 'output.txt') - const outputDir = path.join(tempDir, 'nested') - const nestedFile = path.join(outputDir, 'artifact.txt') - - try { - fs.mkdirSync(outputDir, {recursive: true}) - fs.writeFileSync(outputFile, 'file', 'utf8') - fs.writeFileSync(nestedFile, 'nested', 'utf8') - - const result = await deleteTargets({ - files: [outputFile], - dirs: [outputDir] - }) - - expect(result.deletedFiles).toEqual([outputFile]) - expect(result.deletedDirs).toEqual([outputDir]) - expect(result.fileErrors).toEqual([]) - expect(result.dirErrors).toEqual([]) - expect(fs.existsSync(outputFile)).toBe(false) - expect(fs.existsSync(outputDir)).toBe(false) - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('caps delete file concurrency to the configured worker limit', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-desk-paths-concurrency-')) - const files = Array.from({length: 40}, (_, index) => path.join(tempDir, `artifact-${index}.txt`)) - let active = 0 - let maxActive = 0 - const originalLstat = fs.promises.lstat.bind(fs.promises) - - try { - fs.mkdirSync(tempDir, {recursive: true}) - for (const filePath of files) fs.writeFileSync(filePath, 'artifact', 'utf8') - - vi.spyOn(fs.promises, 'lstat').mockImplementation(async filePath => { - active += 1 - maxActive = Math.max(maxActive, active) - await new Promise(resolve => setTimeout(resolve, 20)) - - try { - return await originalLstat(filePath) - } - finally { - active -= 1 - } - }) - - const result = await deleteFiles(files) - - expect(result.deleted).toBe(files.length) - expect(result.errors).toEqual([]) - expect(maxActive).toBeLessThanOrEqual(32) - expect(maxActive).toBeGreaterThan(1) - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('deletes only empty directories from deepest to shallowest', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-desk-paths-empty-dirs-')) - const parentDir = path.join(tempDir, 'empty-parent') - const childDir = path.join(parentDir, 'leaf') - const nonEmptyDir = path.join(tempDir, 'non-empty') - - try { - fs.mkdirSync(childDir, {recursive: true}) - fs.mkdirSync(nonEmptyDir, {recursive: true}) - fs.writeFileSync(path.join(nonEmptyDir, 'keep.txt'), 'keep', 'utf8') - - const result = await deleteEmptyDirectories([parentDir, childDir, nonEmptyDir]) - - expect(result.deleted).toBe(2) - expect(result.deletedPaths).toEqual([childDir, parentDir]) - expect(result.errors).toEqual([]) - expect(fs.existsSync(parentDir)).toBe(false) - expect(fs.existsSync(nonEmptyDir)).toBe(true) - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('skips directories that become non-empty before empty-directory deletion runs', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-desk-paths-empty-race-')) - const targetDir = path.join(tempDir, 'maybe-empty') - - try { - fs.mkdirSync(targetDir, {recursive: true}) - fs.writeFileSync(path.join(targetDir, 'new-file.txt'), 'late write', 'utf8') - - const result = await deleteEmptyDirectories([targetDir, path.join(tempDir, 'missing')]) - - expect(result.deleted).toBe(0) - expect(result.deletedPaths).toEqual([]) - expect(result.errors).toEqual([]) - expect(fs.existsSync(targetDir)).toBe(true) - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) -}) diff --git a/cli/src/plugins/desk-paths.ts b/cli/src/plugins/desk-paths.ts deleted file mode 100644 index add7c1dd..00000000 --- a/cli/src/plugins/desk-paths.ts +++ /dev/null @@ -1 +0,0 @@ -export * from '../core/desk-paths' diff --git a/cli/src/plugins/ide-config-output.test.ts b/cli/src/plugins/ide-config-output.test.ts deleted file mode 100644 index 1fc47b65..00000000 --- a/cli/src/plugins/ide-config-output.test.ts +++ /dev/null @@ -1,238 +0,0 @@ -import type { - OutputWriteContext, - Project, - ProjectIDEConfigFile -} from './plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {EditorConfigOutputPlugin} from './EditorConfigOutputPlugin' -import {JetBrainsIDECodeStyleConfigOutputPlugin} from './JetBrainsIDECodeStyleConfigOutputPlugin' -import {createLogger, FilePathKind, IDEKind} from './plugin-core' -import {VisualStudioCodeIDEConfigOutputPlugin} from './VisualStudioCodeIDEConfigOutputPlugin' -import {ZedIDEConfigOutputPlugin} from './ZedIDEConfigOutputPlugin' - -function createProject( - workspaceBase: string, - name: string, - promptSource = false -): Project { - return { - name, - isPromptSourceProject: promptSource, - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: name, - basePath: workspaceBase, - getDirectoryName: () => name, - getAbsolutePath: () => path.join(workspaceBase, name) - } - } as Project -} - -function createConfigFile( - type: IDEKind, - sourcePath: string, - content: string -): ProjectIDEConfigFile { - return { - type, - content, - length: content.length, - filePathKind: FilePathKind.Absolute, - dir: { - pathKind: FilePathKind.Absolute, - path: sourcePath, - getDirectoryName: () => path.basename(sourcePath) - } - } as ProjectIDEConfigFile -} - -function createWriteContext(workspaceBase: string): OutputWriteContext { - return { - logger: createLogger('IdeConfigOutputPluginTest', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => path.basename(workspaceBase) - }, - projects: [ - createProject(workspaceBase, 'aindex', true), - createProject(workspaceBase, 'memory-sync') - ] - }, - editorConfigFiles: [ - createConfigFile( - IDEKind.EditorConfig, - path.join(workspaceBase, 'aindex', 'public', '.editorconfig'), - 'root = true\n' - ) - ], - vscodeConfigFiles: [ - createConfigFile( - IDEKind.VSCode, - path.join( - workspaceBase, - 'aindex', - 'public', - '.vscode', - 'settings.json' - ), - '{}\n' - ), - createConfigFile( - IDEKind.VSCode, - path.join( - workspaceBase, - 'aindex', - 'public', - '.vscode', - 'extensions.json' - ), - '{}\n' - ) - ], - zedConfigFiles: [ - createConfigFile( - IDEKind.Zed, - path.join(workspaceBase, 'aindex', 'public', '.zed', 'settings.json'), - '{"tab_size": 2}\n' - ) - ], - jetbrainsConfigFiles: [ - createConfigFile( - IDEKind.IntellijIDEA, - path.join(workspaceBase, 'aindex', 'public', '.idea', '.gitignore'), - '/workspace.xml\n' - ), - createConfigFile( - IDEKind.IntellijIDEA, - path.join( - workspaceBase, - 'aindex', - 'public', - '.idea', - 'codeStyles', - 'Project.xml' - ), - '\n' - ), - createConfigFile( - IDEKind.IntellijIDEA, - path.join( - workspaceBase, - 'aindex', - 'public', - '.idea', - 'codeStyles', - 'codeStyleConfig.xml' - ), - '\n' - ) - ] - } - } as OutputWriteContext -} - -describe('ide config output plugins', () => { - it('includes the prompt source project for editorconfig output', async () => { - const workspaceBase = path.resolve('tmp/ide-output-editorconfig') - const plugin = new EditorConfigOutputPlugin() - const declarations = await plugin.declareOutputFiles( - createWriteContext(workspaceBase) - ) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toEqual([ - path.join(workspaceBase, 'aindex', '.editorconfig'), - path.join(workspaceBase, 'memory-sync', '.editorconfig') - ]) - }) - - it('includes the prompt source project for vscode output', async () => { - const workspaceBase = path.resolve('tmp/ide-output-vscode') - const plugin = new VisualStudioCodeIDEConfigOutputPlugin() - const declarations = await plugin.declareOutputFiles( - createWriteContext(workspaceBase) - ) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toEqual([ - path.join(workspaceBase, 'aindex', '.vscode', 'settings.json'), - path.join(workspaceBase, 'aindex', '.vscode', 'extensions.json'), - path.join(workspaceBase, 'memory-sync', '.vscode', 'settings.json'), - path.join(workspaceBase, 'memory-sync', '.vscode', 'extensions.json') - ]) - }) - - it('includes the prompt source project for zed output and cleanup', async () => { - const workspaceBase = path.resolve('tmp/ide-output-zed') - const plugin = new ZedIDEConfigOutputPlugin() - const ctx = createWriteContext(workspaceBase) - const declarations = await plugin.declareOutputFiles(ctx) - const cleanup = await plugin.declareCleanupPaths(ctx) - - expect(declarations.map(declaration => declaration.path)).toEqual([ - path.join(workspaceBase, 'aindex', '.zed', 'settings.json'), - path.join(workspaceBase, 'memory-sync', '.zed', 'settings.json') - ]) - expect(cleanup.delete).toEqual([ - { - kind: 'file', - label: 'delete.project', - path: path.join(workspaceBase, 'aindex', '.zed', 'settings.json'), - scope: 'project' - }, - { - kind: 'file', - label: 'delete.project', - path: path.join(workspaceBase, 'memory-sync', '.zed', 'settings.json'), - scope: 'project' - } - ]) - }) - - it('includes the prompt source project for jetbrains output', async () => { - const workspaceBase = path.resolve('tmp/ide-output-jetbrains') - const plugin = new JetBrainsIDECodeStyleConfigOutputPlugin() - const declarations = await plugin.declareOutputFiles( - createWriteContext(workspaceBase) - ) - const paths = declarations.map(declaration => declaration.path) - - expect(paths).toEqual([ - path.join(workspaceBase, 'aindex', '.idea', '.gitignore'), - path.join(workspaceBase, 'aindex', '.idea', 'codeStyles', 'Project.xml'), - path.join( - workspaceBase, - 'aindex', - '.idea', - 'codeStyles', - 'codeStyleConfig.xml' - ), - path.join(workspaceBase, 'aindex', '.editorconfig'), - path.join(workspaceBase, 'memory-sync', '.idea', '.gitignore'), - path.join( - workspaceBase, - 'memory-sync', - '.idea', - 'codeStyles', - 'Project.xml' - ), - path.join( - workspaceBase, - 'memory-sync', - '.idea', - 'codeStyles', - 'codeStyleConfig.xml' - ), - path.join(workspaceBase, 'memory-sync', '.editorconfig') - ]) - }) -}) diff --git a/cli/src/plugins/plugin-agentskills-compact.ts b/cli/src/plugins/plugin-agentskills-compact.ts deleted file mode 100644 index abe6e9b6..00000000 --- a/cli/src/plugins/plugin-agentskills-compact.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - GenericSkillsOutputPlugin -} from './GenericSkillsOutputPlugin' diff --git a/cli/src/plugins/plugin-agentsmd.ts b/cli/src/plugins/plugin-agentsmd.ts deleted file mode 100644 index 2a8505e4..00000000 --- a/cli/src/plugins/plugin-agentsmd.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - AgentsOutputPlugin -} from './AgentsOutputPlugin' diff --git a/cli/src/plugins/plugin-claude-code-cli.ts b/cli/src/plugins/plugin-claude-code-cli.ts deleted file mode 100644 index e65d3791..00000000 --- a/cli/src/plugins/plugin-claude-code-cli.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - ClaudeCodeCLIOutputPlugin -} from './ClaudeCodeCLIOutputPlugin' diff --git a/cli/src/plugins/plugin-core.ts b/cli/src/plugins/plugin-core.ts deleted file mode 100644 index 20b28833..00000000 --- a/cli/src/plugins/plugin-core.ts +++ /dev/null @@ -1,172 +0,0 @@ -import type { - ILogger, - LoggerDiagnosticRecord, - LogLevel -} from '@truenine/logger' -import { - clearBufferedDiagnostics as clearBufferedDiagnosticsNative, - createLogger as createLoggerNative, - drainBufferedDiagnostics as drainBufferedDiagnosticsNative, - getGlobalLogLevel as getGlobalLogLevelNative, - setGlobalLogLevel as setGlobalLogLevelNative -} from '@truenine/logger' - -export { - AbstractInputCapability -} from '../inputs/AbstractInputCapability' - -export { - AbstractOutputPlugin -} from './plugin-core/AbstractOutputPlugin' - -export type { - AbstractOutputPluginOptions, - CleanupScopePathsConfig, - CombineOptions, - CommandNameTransformOptions, - CommandOutputConfig, - OutputCleanupConfig, - RuleContentOptions, - RuleOutputConfig, - SkillFrontMatterOptions, - SkillsOutputConfig, - SubAgentArtifactFormat, - SubAgentFileNameSource, - SubAgentNameTransformOptions, - SubAgentsOutputConfig -} from './plugin-core/AbstractOutputPlugin' - -export { - AbstractPlugin -} from './plugin-core/AbstractPlugin' - -export { - DEFAULT_USER_CONFIG, - FileExtensions, - FrontMatterFields, - GlobalConfigDirs, - hasSourcePromptExtension, - IgnoreFiles, - OutputFileNames, - OutputPrefixes, - OutputSubdirectories, - PathPlaceholders, - PLUGIN_NAMES, - PreservedSkills, - SourceLocaleExtensions, - SourcePromptExtensions, - SourcePromptFileExtensions, - ToolPresets, - WORKSPACE_ROOT_PROJECT_NAME -} from './plugin-core/constants' - -export type { - PluginName -} from './plugin-core/constants' - -export { - validateCommandMetadata, - validateRuleMetadata, - validateSkillMetadata, - validateSubAgentMetadata -} from './plugin-core/ExportMetadataTypes' - -export { - applySubSeriesGlobPrefix, - filterByProjectConfig, - findAllGitRepos, - findGitModuleInfoDirs, - resolveGitInfoDir -} from './plugin-core/filters' - -export type { - FilterConfigPath, - SeriesFilterable -} from './plugin-core/filters' - -export { - GlobalScopeCollector -} from './plugin-core/GlobalScopeCollector' - -export type { - GlobalScopeCollectorOptions, - ScopeRegistration -} from './plugin-core/GlobalScopeCollector' - -export { - ScopePriority, - ScopeRegistry -} from './plugin-core/GlobalScopeCollector' - -export { - createLocalizedPromptReader, - LocalizedPromptReader -} from './plugin-core/LocalizedPromptReader' - -export { - collectMcpServersFromSkills, - McpConfigManager, - transformMcpConfigForCursor, - transformMcpConfigForOpencode, - transformMcpServerMap -} from './plugin-core/McpConfigManager' - -export type { - McpConfigFormat, - McpConfigTransformer, - McpServerEntry, - McpWriteResult, - TransformedMcpConfig -} from './plugin-core/McpConfigManager' - -export { - clearPromptArtifactCache, - compileRawPromptArtifact, - readPromptArtifact -} from './plugin-core/PromptArtifactCache' - -export { - deriveSubAgentIdentity, - flattenPromptPath, - resolveSkillName, - resolveSubAgentCanonicalName -} from './plugin-core/PromptIdentity' - -export { - RegistryWriter -} from './plugin-core/RegistryWriter' - -export { - DEFAULT_SCOPE_PRIORITY, - resolveTopicScopes -} from './plugin-core/scopePolicy' - -export * from './plugin-core/types' - -export type { - DiagnosticLines, - ILogger, - LoggerDiagnosticInput, - LoggerDiagnosticRecord, - LogLevel -} from '@truenine/logger' - -export function clearBufferedDiagnostics(): void { - clearBufferedDiagnosticsNative() -} - -export function createLogger(namespace: string, logLevel?: LogLevel): ILogger { - return createLoggerNative(namespace, logLevel) -} - -export function drainBufferedDiagnostics(): LoggerDiagnosticRecord[] { - return drainBufferedDiagnosticsNative() -} - -export function getGlobalLogLevel(): LogLevel | undefined { - return getGlobalLogLevelNative() -} - -export function setGlobalLogLevel(level: LogLevel): void { - setGlobalLogLevelNative(level) -} diff --git a/cli/src/plugins/plugin-core/AbstractOutputPlugin.frontmatter.test.ts b/cli/src/plugins/plugin-core/AbstractOutputPlugin.frontmatter.test.ts deleted file mode 100644 index e0393c18..00000000 --- a/cli/src/plugins/plugin-core/AbstractOutputPlugin.frontmatter.test.ts +++ /dev/null @@ -1,204 +0,0 @@ -import type {CommandPrompt, OutputWriteContext, RulePrompt, SkillPrompt, SubAgentPrompt} from './types' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {createLogger, FilePathKind, PromptKind} from '../plugin-core' -import {AbstractOutputPlugin} from './AbstractOutputPlugin' - -class TestFrontMatterOutputPlugin extends AbstractOutputPlugin { - constructor(options?: ConstructorParameters[1]) { - super('TestFrontMatterOutputPlugin', { - globalConfigDir: '.tool', - outputFileName: '', - commands: { - sourceScopes: ['project'], - transformFrontMatter: () => ({description: 'command'}) - }, - subagents: { - sourceScopes: ['project'] - }, - skills: {}, - rules: { - sourceScopes: ['project'] - }, - ...options - }) - } - - async renderCommand(cmd: CommandPrompt, ctx: OutputWriteContext): Promise { - return this.buildCommandContent(cmd, ctx) - } - - renderSubAgent(agent: SubAgentPrompt, ctx: OutputWriteContext): string { - return this.buildSubAgentContent(agent, ctx) - } - - renderSkill(skill: SkillPrompt, ctx: OutputWriteContext): string { - return this.buildSkillMainContent(skill, ctx) - } - - renderRule(rule: RulePrompt, ctx: OutputWriteContext): string { - return this.buildRuleContent(rule, ctx) - } -} - -function createWriteContext(blankLineAfter?: boolean): OutputWriteContext { - const workspaceBase = path.resolve('tmp/frontmatter-workspace') - return { - logger: createLogger('TestFrontMatterOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - pluginOptions: blankLineAfter == null - ? {} - : { - frontMatter: { - blankLineAfter - } - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => 'workspace' - }, - projects: [] - } - } - } as OutputWriteContext -} - -function createCommandPrompt(): CommandPrompt { - return { - type: PromptKind.Command, - content: 'command content', - length: 15, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'dev/build.mdx', - basePath: path.resolve('tmp/dist/commands'), - getDirectoryName: () => 'build', - getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') - }, - commandPrefix: 'dev', - commandName: 'build', - yamlFrontMatter: { - namingCase: 'kebabCase', - description: 'Build command' - }, - markdownContents: [] - } as CommandPrompt -} - -function createSubAgentPrompt(): SubAgentPrompt { - return { - type: PromptKind.SubAgent, - content: 'subagent content', - length: 16, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'qa/boot.mdx', - basePath: path.resolve('tmp/dist/subagents'), - getDirectoryName: () => 'boot', - getAbsolutePath: () => path.resolve('tmp/dist/subagents/qa/boot.mdx') - }, - agentPrefix: 'qa', - agentName: 'boot', - canonicalName: 'qa-boot', - yamlFrontMatter: { - namingCase: 'kebabCase', - description: 'subagent desc' - }, - markdownContents: [] - } as SubAgentPrompt -} - -function createSkillPrompt(): SkillPrompt { - return { - type: PromptKind.Skill, - content: 'skill content', - length: 13, - filePathKind: FilePathKind.Relative, - skillName: 'ship-it', - dir: { - pathKind: FilePathKind.Relative, - path: 'skills/ship-it', - basePath: path.resolve('tmp/dist/skills'), - getDirectoryName: () => 'ship-it', - getAbsolutePath: () => path.resolve('tmp/dist/skills/ship-it') - }, - yamlFrontMatter: { - namingCase: 'kebabCase', - name: 'ship-it', - description: 'Ship release' - }, - markdownContents: [] - } as SkillPrompt -} - -function createRulePrompt(): RulePrompt { - return { - type: PromptKind.Rule, - content: 'rule content', - length: 12, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'rules/frontend.mdx', - basePath: path.resolve('tmp/dist/rules'), - getDirectoryName: () => 'frontend', - getAbsolutePath: () => path.resolve('tmp/dist/rules/frontend.mdx') - }, - prefix: 'frontend', - ruleName: 'guard', - globs: ['src/**'], - scope: 'project', - yamlFrontMatter: { - namingCase: 'kebabCase', - description: 'Rule desc' - }, - markdownContents: [] - } as RulePrompt -} - -describe('abstract output plugin front matter formatting', () => { - it('adds a blank line after front matter by default for command/rule/subagent/skill outputs', async () => { - const plugin = new TestFrontMatterOutputPlugin() - const ctx = createWriteContext() - - await expect(plugin.renderCommand(createCommandPrompt(), ctx)).resolves.toMatch(/\n---\n\ncommand content$/) - expect(plugin.renderRule(createRulePrompt(), ctx)).toMatch(/\n---\n\nrule content$/) - expect(plugin.renderSubAgent(createSubAgentPrompt(), ctx)).toMatch(/\n---\n\nsubagent content$/) - expect(plugin.renderSkill(createSkillPrompt(), ctx)).toMatch(/\n---\n\nskill content$/) - }) - - it('keeps the derived skill name in raw skill front matter output', () => { - const plugin = new TestFrontMatterOutputPlugin() - const ctx = createWriteContext() - - expect(plugin.renderSkill(createSkillPrompt(), ctx)).toContain('name: ship-it') - }) - - it('removes the extra blank line when frontMatter.blankLineAfter is false', async () => { - const plugin = new TestFrontMatterOutputPlugin() - const ctx = createWriteContext(false) - - await expect(plugin.renderCommand(createCommandPrompt(), ctx)).resolves.toMatch(/\n---\ncommand content$/) - expect(plugin.renderRule(createRulePrompt(), ctx)).toMatch(/\n---\nrule content$/) - expect(plugin.renderSubAgent(createSubAgentPrompt(), ctx)).toMatch(/\n---\nsubagent content$/) - expect(plugin.renderSkill(createSkillPrompt(), ctx)).toMatch(/\n---\nskill content$/) - }) - - it('keeps the blank line when a plugin opts out of the shared switch', async () => { - const plugin = new TestFrontMatterOutputPlugin({ - supportsBlankLineAfterFrontMatter: false - }) - const ctx = createWriteContext(false) - - await expect(plugin.renderCommand(createCommandPrompt(), ctx)).resolves.toMatch(/\n---\n\ncommand content$/) - }) -}) diff --git a/cli/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts b/cli/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts deleted file mode 100644 index 79fe1470..00000000 --- a/cli/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts +++ /dev/null @@ -1,114 +0,0 @@ -import type {OutputWriteContext, SubAgentPrompt} from './types' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {createLogger, FilePathKind, PromptKind} from '../plugin-core' -import {AbstractOutputPlugin} from './AbstractOutputPlugin' - -class TestSubAgentOutputPlugin extends AbstractOutputPlugin { - constructor(options?: ConstructorParameters[1]) { - super('TestSubAgentOutputPlugin', { - globalConfigDir: '.tool', - outputFileName: '', - subagents: { - sourceScopes: ['project'], - ...options?.subagents - } - }) - } -} - -function createSubAgentPrompt(): SubAgentPrompt { - return { - type: PromptKind.SubAgent, - content: 'subagent content', - length: 16, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: 'qa/boot.mdx', - basePath: path.resolve('tmp/dist/subagents'), - getDirectoryName: () => 'boot', - getAbsolutePath: () => path.resolve('tmp/dist/subagents/qa/boot.mdx') - }, - agentPrefix: 'qa', - agentName: 'boot', - canonicalName: 'qa-boot', - yamlFrontMatter: { - namingCase: 'kebabCase', - description: 'subagent desc' - }, - markdownContents: [] - } as SubAgentPrompt -} - -function createWriteContext(subAgents: readonly SubAgentPrompt[]): OutputWriteContext { - const workspaceBase = path.resolve('tmp/workspace') - return { - logger: createLogger('TestSubAgentOutputPlugin', 'error'), - fs, - path, - glob: {} as never, - dryRun: true, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceBase, - getDirectoryName: () => 'workspace' - }, - projects: [{ - name: 'demo', - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'demo', - basePath: workspaceBase, - getDirectoryName: () => 'demo', - getAbsolutePath: () => path.join(workspaceBase, 'demo') - } - }] - }, - subAgents - } - } as OutputWriteContext -} - -describe('abstract output plugin subagent naming', () => { - it('uses prefix-agent.ext by default', async () => { - const plugin = new TestSubAgentOutputPlugin() - const declarations = await plugin.declareOutputFiles(createWriteContext([createSubAgentPrompt()])) - const [declaration] = declarations - - expect(declaration?.path.endsWith(path.join('.tool', 'agents', 'qa-boot.md'))).toBe(true) - }) - - it('supports custom linkSymbol and ext for subagent output names', async () => { - const plugin = new TestSubAgentOutputPlugin({ - subagents: { - sourceScopes: ['project'], - linkSymbol: '_', - ext: '.markdown' - } - }) - const declarations = await plugin.declareOutputFiles(createWriteContext([createSubAgentPrompt()])) - const [declaration] = declarations - - expect(declaration?.path.endsWith(path.join('.tool', 'agents', 'qa_boot.markdown'))).toBe(true) - }) - - it('supports subagents.transformFrontMatter declaratively', async () => { - const plugin = new TestSubAgentOutputPlugin({ - subagents: { - sourceScopes: ['project'], - transformFrontMatter: () => ({role: 'qa'}) - } - }) - const declarations = await plugin.declareOutputFiles(createWriteContext([createSubAgentPrompt()])) - const [declaration] = declarations - if (declaration == null) throw new Error('Expected one subagent declaration') - - const content = await plugin.convertContent(declaration, createWriteContext([createSubAgentPrompt()])) - expect(String(content)).toContain('role:') - expect(String(content)).toContain('subagent content') - }) -}) diff --git a/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts b/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts deleted file mode 100644 index 33874f07..00000000 --- a/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts +++ /dev/null @@ -1,1424 +0,0 @@ -import type {BuildPromptTomlArtifactOptions} from '@truenine/md-compiler' -import type {ToolPresetName} from './GlobalScopeCollector' -import type {RegistryWriter} from './RegistryWriter' -import type { - CommandPrompt, - CommandSeriesPluginOverride, - ILogger, - OutputCleanContext, - OutputCleanupDeclarations, - OutputCleanupPathDeclaration, - OutputCleanupScope, - OutputDeclarationScope, - OutputFileDeclaration, - OutputPlugin, - OutputPluginCapabilities, - OutputPluginContext, - OutputScopeSelection, - OutputScopeTopic, - OutputTopicCapability, - OutputWriteContext, - Path, - Project, - ProjectConfig, - RegistryData, - RegistryOperationResult, - RulePrompt, - RuleScope, - SkillPrompt, - SubAgentPrompt, - WslMirrorFileDeclaration -} from './types' - -import {Buffer} from 'node:buffer' -import * as path from 'node:path' -import process from 'node:process' -import {buildPromptTomlArtifact} from '@truenine/md-compiler' -import {buildMarkdownWithFrontMatter, buildMarkdownWithRawFrontMatter} from '@truenine/md-compiler/markdown' -import {buildConfigDiagnostic, diagnosticLines} from '@/diagnostics' -import {getEffectiveHomeDir} from '@/runtime-environment' -import {AbstractPlugin} from './AbstractPlugin' -import {FilePathKind, PluginKind} from './enums' -import {applySubSeriesGlobPrefix, filterByProjectConfig} from './filters' -import {GlobalScopeCollector} from './GlobalScopeCollector' -import {compileRawPromptArtifact} from './PromptArtifactCache' -import {resolveSkillName, resolveSubAgentCanonicalName} from './PromptIdentity' -import {resolveTopicScopes} from './scopePolicy' -import {OUTPUT_SCOPE_TOPICS} from './types' - -interface ScopedSourceConfig { - /** Allowed source scopes for the topic */ - readonly sourceScopes?: readonly OutputDeclarationScope[] - /** Optional source-scope remap before output selection */ - readonly scopeRemap?: Partial> -} - -/** - * Options for building skill front matter - */ -export interface SkillFrontMatterOptions { - readonly includeTools?: boolean - readonly toolFormat?: 'array' | 'string' - readonly additionalFields?: Record -} - -/** - * Options for building rule content - */ -export interface RuleContentOptions { - readonly fileExtension: '.mdc' | '.md' - readonly alwaysApply: boolean - readonly globJoinPattern: ', ' | '|' | string - readonly frontMatterFormatter?: (globs: string) => unknown - readonly additionalFrontMatter?: Record -} - -/** - * Rule output configuration (declarative) - */ -export interface RuleOutputConfig { - /** Rules subdirectory, default 'rules' */ - readonly subDir?: string - /** Link symbol between series and ruleName, default '-' */ - readonly linkSymbol?: string - /** Rule file prefix, default 'rule' */ - readonly prefix?: string - /** Rule file extension, default '.md' */ - readonly ext?: string - /** Custom frontmatter transformer */ - readonly transformFrontMatter?: (rule: RulePrompt) => Record - /** Allowed rule source scopes, default ['project', 'global'] */ - readonly sourceScopes?: readonly OutputDeclarationScope[] -} - -/** - * Command output configuration (declarative) - */ -export interface CommandOutputConfig { - /** Commands subdirectory, default 'commands' */ - readonly subDir?: string - /** Custom command frontmatter transformer */ - readonly transformFrontMatter?: ( - cmd: CommandPrompt, - context: { - readonly sourceFrontMatter?: Record - readonly isRecompiled: boolean - } - ) => Record - /** Allowed command source scopes, default ['project', 'global'] */ - readonly sourceScopes?: readonly OutputDeclarationScope[] - /** Optional source-scope remap before output selection */ - readonly scopeRemap?: Partial> -} - -export type SubAgentArtifactFormat = 'markdown' | 'toml' -export type SubAgentFileNameSource = 'derivedPath' | 'frontMatterName' - -/** - * SubAgent output configuration (declarative) - */ -export interface SubAgentsOutputConfig extends ScopedSourceConfig { - /** SubAgents subdirectory, default 'agents' */ - readonly subDir?: string - /** Whether to include input-derived prefix in output filename, default true */ - readonly includePrefix?: boolean - /** Separator between prefix and agent name, default '-' */ - readonly linkSymbol?: string - /** SubAgent file extension, default '.md' */ - readonly ext?: string - /** Output artifact format, default 'markdown' */ - readonly artifactFormat?: SubAgentArtifactFormat - /** Field name that receives prompt body when artifactFormat='toml' */ - readonly bodyFieldName?: string - /** Source for output file name, default 'derivedPath' */ - readonly fileNameSource?: SubAgentFileNameSource - /** Front matter field remap before artifact emission */ - readonly fieldNameMap?: Readonly> - /** Front matter fields to exclude from artifact emission */ - readonly excludedFrontMatterFields?: readonly string[] - /** Additional fields injected into emitted artifact */ - readonly extraFields?: Readonly> - /** Preferred root-level field order for emitted artifact */ - readonly fieldOrder?: readonly string[] - /** Optional frontmatter transformer */ - readonly transformFrontMatter?: ( - subAgent: SubAgentPrompt, - context: { - readonly sourceFrontMatter?: Record - } - ) => Record -} - -/** - * Skills output configuration (declarative) - */ -export interface SkillsOutputConfig extends ScopedSourceConfig { - /** Skills subdirectory, default 'skills' */ - readonly subDir?: string -} - -/** - * Options for transforming command names in output filenames. - * Used by transformCommandName method to control prefix handling. - */ -export interface CommandNameTransformOptions { - readonly includeSeriesPrefix?: boolean - readonly seriesSeparator?: string -} - -/** - * Options for transforming subagent names in output filenames. - */ -export interface SubAgentNameTransformOptions { - readonly includePrefix?: boolean - readonly linkSymbol?: string - readonly ext?: string -} - -/** - * Cleanup path entries for one scope. - * Relative paths are resolved by scope base: - * - project: project root - * - global: user home - * - xdgConfig: XDG config home (defaults to ~/.config) - */ -export interface CleanupScopePathsConfig { - readonly files?: readonly string[] - readonly dirs?: readonly string[] - readonly globs?: readonly string[] -} - -/** - * Declarative cleanup configuration for output plugins. - */ -export interface OutputCleanupConfig { - readonly delete?: Partial> - readonly protect?: Partial> - readonly excludeScanGlobs?: readonly string[] -} - -/** - * Options for configuring AbstractOutputPlugin subclasses. - */ -export interface AbstractOutputPluginOptions { - globalConfigDir?: string - - outputFileName?: string - - treatWorkspaceRootProjectAsProject?: boolean - - dependsOn?: readonly string[] - - indexignore?: string - - /** Command output configuration (declarative) */ - commands?: CommandOutputConfig - - /** SubAgent output configuration (declarative) */ - subagents?: SubAgentsOutputConfig - - /** Skills output configuration (declarative) */ - skills?: SkillsOutputConfig - - toolPreset?: ToolPresetName - - /** Rule output configuration (declarative) */ - rules?: RuleOutputConfig - - /** Cleanup configuration (declarative) */ - cleanup?: OutputCleanupConfig - - /** Host-home files that should be mirrored into configured WSL instances */ - wslMirrors?: readonly string[] - - /** Explicit output capability matrix for scope override validation */ - capabilities?: OutputPluginCapabilities - - /** Whether this plugin honors the shared blank-line-after-front-matter option */ - supportsBlankLineAfterFrontMatter?: boolean -} - -/** - * Options for combining global content with project content. - */ -export interface CombineOptions { - separator?: string - - skipIfEmpty?: boolean - - position?: 'before' | 'after' -} - -type DeclarativeOutputSource - = | {readonly kind: 'projectRootMemory', readonly content: string} - | {readonly kind: 'projectChildMemory', readonly content: string} - | {readonly kind: 'globalMemory', readonly content: string} - | {readonly kind: 'command', readonly command: CommandPrompt} - | {readonly kind: 'subAgent', readonly subAgent: SubAgentPrompt} - | {readonly kind: 'skillMain', readonly skill: SkillPrompt} - | {readonly kind: 'skillReference', readonly content: string} - | { - readonly kind: 'skillResource' - readonly content: string - readonly encoding: 'text' | 'base64' - } - | {readonly kind: 'rule', readonly rule: RulePrompt} - | {readonly kind: 'ignoreFile', readonly content: string} - -export abstract class AbstractOutputPlugin extends AbstractPlugin implements OutputPlugin { - readonly declarativeOutput = true as const - - readonly outputCapabilities: OutputPluginCapabilities - - protected readonly globalConfigDir: string - - protected readonly outputFileName: string - - protected readonly treatWorkspaceRootProjectAsProject: boolean - - protected readonly indexignore: string | undefined - - protected readonly commandsConfig: { - readonly subDir: string - readonly transformFrontMatter?: ( - cmd: CommandPrompt, - context: { - readonly sourceFrontMatter?: Record - readonly isRecompiled: boolean - } - ) => Record - readonly sourceScopes: readonly OutputDeclarationScope[] - readonly scopeRemap?: Partial> - } - - protected readonly subAgentsConfig: { - readonly subDir: string - readonly sourceScopes: readonly OutputDeclarationScope[] - readonly includePrefix: boolean - readonly linkSymbol: string - readonly ext: string - readonly artifactFormat: SubAgentArtifactFormat - readonly bodyFieldName?: string - readonly fileNameSource: SubAgentFileNameSource - readonly fieldNameMap?: Readonly> - readonly excludedFrontMatterFields?: readonly string[] - readonly extraFields?: Readonly> - readonly fieldOrder?: readonly string[] - readonly scopeRemap?: Partial> - readonly transformFrontMatter?: ( - subAgent: SubAgentPrompt, - context: { - readonly sourceFrontMatter?: Record - } - ) => Record - } - - protected readonly commandOutputEnabled: boolean - - protected readonly subAgentOutputEnabled: boolean - - protected readonly skillsConfig: { - readonly subDir: string - readonly sourceScopes: readonly OutputDeclarationScope[] - readonly scopeRemap?: Partial> - } - - protected readonly skillOutputEnabled: boolean - - protected readonly toolPreset: ToolPresetName | undefined - - /** Rule output configuration */ - protected readonly rulesConfig: RuleOutputConfig - - protected readonly ruleOutputEnabled: boolean - - protected readonly cleanupConfig: OutputCleanupConfig - - protected readonly wslMirrorPaths: readonly string[] - - protected readonly supportsBlankLineAfterFrontMatter: boolean - - private readonly registryWriterCache: Map> = new Map() - - private warnedDeprecatedSubAgentFileNameSource = false - - protected constructor(name: string, options?: AbstractOutputPluginOptions) { - super(name, PluginKind.Output, options?.dependsOn) - this.globalConfigDir = options?.globalConfigDir ?? '' - this.outputFileName = options?.outputFileName ?? '' - this.treatWorkspaceRootProjectAsProject = options?.treatWorkspaceRootProjectAsProject ?? false - this.indexignore = options?.indexignore - - this.commandOutputEnabled = options?.commands != null - this.commandsConfig = this.createCommandsConfig(options?.commands) - this.subAgentOutputEnabled = options?.subagents != null - this.subAgentsConfig = this.createSubAgentsConfig(options?.subagents) - this.skillOutputEnabled = options?.skills != null - this.skillsConfig = this.createSkillsConfig(options?.skills) - this.toolPreset = options?.toolPreset - - this.ruleOutputEnabled = options?.rules != null - this.rulesConfig = { - ...options?.rules, - sourceScopes: options?.rules?.sourceScopes ?? ['project', 'global'] - } // Initialize rule output config with defaults - this.cleanupConfig = options?.cleanup ?? {} - this.wslMirrorPaths = options?.wslMirrors ?? [] - this.supportsBlankLineAfterFrontMatter = options?.supportsBlankLineAfterFrontMatter ?? true - - this.outputCapabilities = options?.capabilities != null ? this.normalizeCapabilities(options.capabilities) : this.buildInferredCapabilities() - } - - private createCommandsConfig(config?: CommandOutputConfig): AbstractOutputPlugin['commandsConfig'] { - return { - subDir: config?.subDir ?? 'commands', - sourceScopes: config?.sourceScopes ?? ['project', 'global'], - ...config?.scopeRemap != null && {scopeRemap: config.scopeRemap}, - ...config?.transformFrontMatter != null && { - transformFrontMatter: config.transformFrontMatter - } - } - } - - private createSubAgentsConfig(config?: SubAgentsOutputConfig): AbstractOutputPlugin['subAgentsConfig'] { - return { - subDir: config?.subDir ?? 'agents', - sourceScopes: config?.sourceScopes ?? ['project', 'global'], - includePrefix: config?.includePrefix ?? true, - linkSymbol: config?.linkSymbol ?? '-', - ext: config?.ext ?? '.md', - artifactFormat: config?.artifactFormat ?? 'markdown', - fileNameSource: config?.fileNameSource ?? 'derivedPath', - ...config?.bodyFieldName != null && { - bodyFieldName: config.bodyFieldName - }, - ...config?.fieldNameMap != null && { - fieldNameMap: config.fieldNameMap - }, - ...config?.excludedFrontMatterFields != null && { - excludedFrontMatterFields: config.excludedFrontMatterFields - }, - ...config?.extraFields != null && {extraFields: config.extraFields}, - ...config?.fieldOrder != null && {fieldOrder: config.fieldOrder}, - ...config?.scopeRemap != null && {scopeRemap: config.scopeRemap}, - ...config?.transformFrontMatter != null && { - transformFrontMatter: config.transformFrontMatter - } - } - } - - private createSkillsConfig(config?: SkillsOutputConfig): AbstractOutputPlugin['skillsConfig'] { - return { - subDir: config?.subDir ?? 'skills', - sourceScopes: config?.sourceScopes ?? ['project', 'global'], - ...config?.scopeRemap != null && {scopeRemap: config.scopeRemap} - } - } - - private buildInferredCapabilities(): OutputPluginCapabilities { - const capabilities: OutputPluginCapabilities = {} - - if (this.outputFileName.length > 0) { - capabilities.prompt = { - scopes: ['project', 'global'], - singleScope: false - } - } - - if (this.ruleOutputEnabled) { - capabilities.rules = { - scopes: this.rulesConfig.sourceScopes ?? ['project', 'global'], - singleScope: false - } - } - - if (this.commandOutputEnabled) { - capabilities.commands = { - scopes: this.commandsConfig.sourceScopes, - singleScope: true - } - } - - if (this.subAgentOutputEnabled) { - capabilities.subagents = { - scopes: this.subAgentsConfig.sourceScopes, - singleScope: true - } - } - - if (this.skillOutputEnabled) { - capabilities.skills = { - scopes: this.skillsConfig.sourceScopes, - singleScope: true - } - } - - return capabilities - } - - private normalizeCapabilities(capabilities: OutputPluginCapabilities): OutputPluginCapabilities { - const normalizedCapabilities: OutputPluginCapabilities = {} - for (const topic of OUTPUT_SCOPE_TOPICS) { - const capability = capabilities[topic] - if (capability == null) continue - - const normalized = this.normalizeCapability(capability) - if (normalized != null) normalizedCapabilities[topic] = normalized - } - return normalizedCapabilities - } - - private normalizeCapability(capability: OutputTopicCapability): OutputTopicCapability | undefined { - const uniqueScopes: OutputDeclarationScope[] = [] - for (const scope of capability.scopes) { - if (!uniqueScopes.includes(scope)) uniqueScopes.push(scope) - } - if (uniqueScopes.length === 0) return void 0 - return { - scopes: uniqueScopes, - singleScope: capability.singleScope - } - } - - protected resolvePromptSourceProjectConfig(ctx: OutputPluginContext | OutputWriteContext): ProjectConfig | undefined { - const projects = this.getConcreteProjects(ctx) - const promptSource = projects.find(p => p.isPromptSourceProject === true) - return promptSource?.projectConfig ?? projects[0]?.projectConfig - } - - protected getConcreteProjects(ctx: OutputPluginContext | OutputWriteContext): Project[] { - return ctx.collectedOutputContext.workspace.projects.filter(project => project.isWorkspaceRootProject !== true) - } - - protected isProjectPromptOutputTarget(project: Project): boolean { - return project.isPromptSourceProject !== true - } - - protected getProjectOutputProjects(ctx: OutputPluginContext | OutputWriteContext): Project[] { - const projects = [...this.getConcreteProjects(ctx)] - if (!this.treatWorkspaceRootProjectAsProject) return projects - - const workspaceRootProject = this.getWorkspaceRootProject(ctx) - if (workspaceRootProject != null) projects.push(workspaceRootProject) - return projects - } - - protected getProjectPromptOutputProjects(ctx: OutputPluginContext | OutputWriteContext): Project[] { - return this.getProjectOutputProjects(ctx).filter(project => this.isProjectPromptOutputTarget(project)) - } - - protected getWorkspaceRootProject(ctx: OutputPluginContext | OutputWriteContext): Project | undefined { - return ctx.collectedOutputContext.workspace.projects.find(project => project.isWorkspaceRootProject === true) - } - - protected resolveProjectRootDir(ctx: OutputPluginContext | OutputWriteContext, project: Project): string | undefined { - if (project.isWorkspaceRootProject === true) { - return this.resolveDirectoryPath(ctx.collectedOutputContext.workspace.directory) - } - - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) return void 0 - return this.resolveDirectoryPath(projectDir) - } - - protected resolveProjectConfigDir(ctx: OutputPluginContext | OutputWriteContext, project: Project): string | undefined { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) return void 0 - if (this.globalConfigDir.length === 0) return projectRootDir - return path.join(projectRootDir, this.globalConfigDir) - } - - protected isRelativePath(p: Path): boolean { - return p.pathKind === FilePathKind.Relative - } - - protected toRelativePath(p: Path): string { - return p.path - } - - protected resolveFullPath(targetPath: Path, outputFileName?: string): string { - const dirPath = this.resolveDirectoryPath(targetPath) - - const fileName = outputFileName ?? this.outputFileName // Append the output file name if provided or if default is set - if (fileName) return path.join(dirPath, fileName) - return dirPath - } - - protected resolveDirectoryPath(targetPath: Path): string { - if (targetPath.pathKind === FilePathKind.Absolute) return targetPath.path - if ('basePath' in targetPath) { - return path.resolve(targetPath.basePath as string, targetPath.path) - } - return path.resolve(process.cwd(), targetPath.path) - } - - protected getWorkspaceConfigDir(ctx: OutputWriteContext): string { - const workspaceDir = this.resolveDirectoryPath(ctx.collectedOutputContext.workspace.directory) - return path.join(workspaceDir, this.globalConfigDir) - } - - protected createRelativePath(pathStr: string, basePath: string, dirNameFn: () => string): string { - void dirNameFn - return path.join(basePath, pathStr) - } - - protected createFileRelativePath(dir: string, fileName: string): string { - return path.join(dir, fileName) - } - - protected getGlobalConfigDir(): string { - return path.join(this.getHomeDir(), this.globalConfigDir) - } - - protected getXdgConfigHomeDir(): string { - const xdgConfigHome = process.env['XDG_CONFIG_HOME'] - if (typeof xdgConfigHome === 'string' && xdgConfigHome.trim().length > 0) { - return xdgConfigHome - } - return path.join(this.getHomeDir(), '.config') - } - - protected getHomeDir(): string { - return getEffectiveHomeDir() - } - - protected joinPath(...segments: string[]): string { - return path.join(...segments) - } - - protected resolvePath(...segments: string[]): string { - return path.resolve(...segments) - } - - protected dirname(p: string): string { - return path.dirname(p) - } - - protected buildProjectPromptCleanupTargets(ctx: OutputCleanContext, fileName: string = this.outputFileName): readonly OutputCleanupPathDeclaration[] { - if (fileName.length === 0) return [] - - const declarations: OutputCleanupPathDeclaration[] = [] - const seenPaths = new Set() - - const pushCleanupFile = (targetPath: string, label: string): void => { - if (seenPaths.has(targetPath)) return - seenPaths.add(targetPath) - declarations.push({ - path: targetPath, - kind: 'file', - scope: 'project', - label - }) - } - - for (const project of this.getProjectPromptOutputProjects(ctx)) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - if (projectRootDir == null) continue - - // Add glob pattern to match all files with the given name in the project directory - // This ensures files in subdirectories not explicitly tracked as childMemoryPrompts are also cleaned up - declarations.push({ - path: this.resolvePath(projectRootDir, '**', fileName), - kind: 'glob', - scope: 'project', - label: 'delete.project.glob' - }) - - pushCleanupFile(this.resolvePath(projectRootDir, fileName), 'delete.project') - - if (project.childMemoryPrompts == null) continue - for (const child of project.childMemoryPrompts) { - pushCleanupFile(this.resolveFullPath(child.dir, fileName), 'delete.project.child') - } - } - - return declarations - } - - protected basename(p: string, ext?: string): string { - return path.basename(p, ext) - } - - protected getIgnoreOutputPath(): string | undefined { - if (this.indexignore == null) return void 0 - return this.indexignore - } - - private resolveCleanupScopeBasePaths(scope: OutputCleanupScope, ctx: OutputCleanContext): readonly string[] { - if (scope === 'global') return [this.getHomeDir()] - if (scope === 'xdgConfig') return [this.getXdgConfigHomeDir()] - - const projectBasePaths: string[] = [] - for (const project of this.getProjectOutputProjects(ctx)) { - const projectBasePath = this.resolveProjectRootDir(ctx, project) - if (projectBasePath == null) continue - projectBasePaths.push(projectBasePath) - } - return projectBasePaths - } - - private resolveCleanupDeclaredPath(basePath: string, declaredPath: string): string { - if (path.isAbsolute(declaredPath)) return path.resolve(declaredPath) - if (declaredPath === '~') return this.getHomeDir() - if (declaredPath.startsWith('~/') || declaredPath.startsWith('~\\')) { - return path.resolve(this.getHomeDir(), declaredPath.slice(2)) - } - return path.resolve(basePath, declaredPath) - } - - private normalizeGlobPattern(rawPattern: string): string { - return rawPattern.replaceAll('\\', '/') - } - - private buildCleanupTargetsFromScopeConfig( - scopeConfig: Partial> | undefined, - kind: 'delete' | 'protect', - ctx: OutputCleanContext - ): readonly OutputCleanupPathDeclaration[] { - if (scopeConfig == null) return [] - - const declarations: OutputCleanupPathDeclaration[] = [] - const scopes: readonly OutputCleanupScope[] = ['project', 'global', 'xdgConfig'] - - const pushTargets = (scope: OutputCleanupScope, targetKind: 'file' | 'directory' | 'glob', entries: readonly string[] | undefined): void => { - if (entries == null || entries.length === 0) return - const basePaths = this.resolveCleanupScopeBasePaths(scope, ctx) - - for (const entry of entries) { - for (const basePath of basePaths) { - const resolved = path.isAbsolute(entry) ? path.resolve(entry) : this.resolveCleanupDeclaredPath(basePath, entry) - - declarations.push({ - path: targetKind === 'glob' ? this.normalizeGlobPattern(resolved) : resolved, - kind: targetKind, - scope, - label: `${kind}.${scope}` - }) - } - } - } - - for (const scope of scopes) { - const entries = scopeConfig[scope] - if (entries == null) continue - pushTargets(scope, 'file', entries.files) - pushTargets(scope, 'directory', entries.dirs) - pushTargets(scope, 'glob', entries.globs) - } - - return declarations - } - - protected resolveFrontMatterBlankLineAfter(ctx?: OutputPluginContext): boolean { - if (!this.supportsBlankLineAfterFrontMatter) return true - return ctx?.pluginOptions?.frontMatter?.blankLineAfter ?? true - } - - protected buildMarkdownContent(content: string, frontMatter?: Record, ctx?: OutputPluginContext): string { - return buildMarkdownWithFrontMatter(frontMatter, content, { - blankLineAfter: this.resolveFrontMatterBlankLineAfter(ctx) - }) - } - - protected buildMarkdownContentWithRaw(content: string, frontMatter?: Record, rawFrontMatter?: string, ctx?: OutputPluginContext): string { - if (frontMatter != null && Object.keys(frontMatter).length > 0) { - return this.buildMarkdownContent(content, frontMatter, ctx) - } // If we have parsed front matter, use it - - if (rawFrontMatter != null && rawFrontMatter.length > 0) { - return buildMarkdownWithRawFrontMatter(rawFrontMatter, content, { - blankLineAfter: this.resolveFrontMatterBlankLineAfter(ctx) - }) - } // If we have raw front matter but parsing failed, use raw - - return content // No front matter - } - - protected buildTomlContent(options: BuildPromptTomlArtifactOptions): string { - return buildPromptTomlArtifact(options) - } - - protected extractGlobalMemoryContent(ctx: OutputWriteContext): string | undefined { - return ctx.collectedOutputContext.globalMemory?.content as string | undefined - } - - protected combineGlobalWithContent(globalContent: string | undefined, projectContent: string, options?: CombineOptions): string { - const {separator = '\n\n', skipIfEmpty = true, position = 'before'} = options ?? {} - - if (skipIfEmpty && (globalContent == null || globalContent.trim().length === 0)) { - return projectContent - } // Skip if global content is undefined/null or empty/whitespace when skipIfEmpty is true - - const effectiveGlobalContent = globalContent ?? '' // If global content is null/undefined but skipIfEmpty is false, treat as empty string - - if (position === 'after') { - return `${projectContent}${separator}${effectiveGlobalContent}` - } // Combine based on position - - return `${effectiveGlobalContent}${separator}${projectContent}` // Default: 'before' - } - - protected getSkillName(skill: SkillPrompt): string { - return resolveSkillName(skill) - } - - protected getSubAgentCanonicalName(subAgent: SubAgentPrompt): string { - return resolveSubAgentCanonicalName(subAgent) - } - - protected transformCommandName(cmd: CommandPrompt, options?: CommandNameTransformOptions): string { - const {includeSeriesPrefix = true, seriesSeparator = '-'} = options ?? {} - - if (!includeSeriesPrefix || cmd.commandPrefix == null) { - return `${cmd.commandName}.md` - } // If prefix should not be included or prefix is not present, return just commandName - - return `${cmd.commandPrefix}${seriesSeparator}${cmd.commandName}.md` - } - - protected transformSubAgentName(subAgent: SubAgentPrompt, options?: SubAgentNameTransformOptions): string { - const {fileNameSource} = this.subAgentsConfig - const includePrefix = options?.includePrefix ?? this.subAgentsConfig.includePrefix - const linkSymbol = options?.linkSymbol ?? this.subAgentsConfig.linkSymbol - const ext = options?.ext ?? this.subAgentsConfig.ext - const normalizedExt = ext.startsWith('.') ? ext : `.${ext}` - if (fileNameSource === 'frontMatterName') { - this.warnDeprecatedSubAgentFileNameSource() - } - - const hasPrefix = includePrefix && subAgent.agentPrefix != null && subAgent.agentPrefix.length > 0 - if (!hasPrefix) return `${subAgent.agentName}${normalizedExt}` - return `${subAgent.agentPrefix}${linkSymbol}${subAgent.agentName}${normalizedExt}` - } - - protected normalizeOutputFileStem(value: string): string { - const sanitizedCharacters = Array.from(value.trim(), character => { - const codePoint = character.codePointAt(0) ?? 0 - if (codePoint <= 31 || '<>:"/\\|?*'.includes(character)) return '-' - return character - }) - let normalized = sanitizedCharacters.join('') - - while (normalized.endsWith('.') || normalized.endsWith(' ')) { - normalized = normalized.slice(0, -1) - } - - if (normalized.length === 0) { - throw new Error(`Cannot derive a valid output file name from "${value}"`) - } - - return normalized - } - - private warnDeprecatedSubAgentFileNameSource(): void { - if (this.warnedDeprecatedSubAgentFileNameSource) return - this.warnedDeprecatedSubAgentFileNameSource = true - - this.log.warn( - buildConfigDiagnostic({ - code: 'SUBAGENT_FRONTMATTER_NAME_SOURCE_DEPRECATED', - title: 'Sub-agent fileNameSource="frontMatterName" now resolves from derived names', - reason: diagnosticLines( - `The ${this.name} plugin no longer reads authored sub-agent front matter names.`, - 'tnmsc now derives sub-agent names from the sub-agent path.' - ), - exactFix: diagnosticLines( - 'Remove authored `name` fields from sub-agent sources.', - 'Keep using `fileNameSource="frontMatterName"` only as a temporary alias for the derived-path naming behavior.' - ), - details: { - plugin: this.name - } - }) - ) - } - - protected appendSubAgentDeclarations( - declarations: OutputFileDeclaration[], - basePath: string, - scope: OutputDeclarationScope, - scopedSubAgents: readonly SubAgentPrompt[] - ): void { - const seenPaths = new Map() - - for (const subAgent of scopedSubAgents) { - const fileName = this.transformSubAgentName(subAgent) - const targetPath = path.join(basePath, this.subAgentsConfig.subDir, fileName) - const existingAgentName = seenPaths.get(targetPath) - - if (existingAgentName != null) { - throw new Error( - `Sub-agent output collision in ${this.name}: "${this.getSubAgentCanonicalName(subAgent)}" and "${existingAgentName}" both resolve to ${targetPath}` - ) - } - - seenPaths.set(targetPath, this.getSubAgentCanonicalName(subAgent)) - declarations.push({ - path: targetPath, - scope, - source: {kind: 'subAgent', subAgent} - }) - } - } - - protected appendCommandDeclarations( - declarations: OutputFileDeclaration[], - basePath: string, - scope: OutputDeclarationScope, - commands: readonly CommandPrompt[], - transformOptions: CommandNameTransformOptions - ): void { - for (const cmd of commands) { - const fileName = this.transformCommandName(cmd, transformOptions) - declarations.push({ - path: path.join(basePath, this.commandsConfig.subDir, fileName), - scope, - source: {kind: 'command', command: cmd} - }) - } - } - - protected appendSkillDeclarations( - declarations: OutputFileDeclaration[], - basePath: string, - scope: OutputDeclarationScope, - scopedSkills: readonly SkillPrompt[] - ): void { - for (const skill of scopedSkills) { - const skillName = this.getSkillName(skill) - const skillDir = path.join(basePath, this.skillsConfig.subDir, skillName) - - declarations.push({ - path: path.join(skillDir, 'SKILL.md'), - scope, - source: {kind: 'skillMain', skill} - }) - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - declarations.push({ - path: path.join(skillDir, childDoc.dir.path.replace(/\.mdx$/, '.md')), - scope, - source: { - kind: 'skillReference', - content: childDoc.content as string - } - }) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - declarations.push({ - path: path.join(skillDir, resource.relativePath), - scope, - source: { - kind: 'skillResource', - content: resource.content, - encoding: resource.encoding - } - }) - } - } - } - } - - protected appendRuleDeclarations(declarations: OutputFileDeclaration[], basePath: string, scope: OutputDeclarationScope, rules: readonly RulePrompt[]): void { - const rulesDir = path.join(basePath, this.rulesConfig.subDir ?? 'rules') - - for (const rule of rules) { - declarations.push({ - path: path.join(rulesDir, this.buildRuleFileName(rule)), - scope, - source: {kind: 'rule', rule} - }) - } - } - - protected buildSubAgentTomlContent(agent: SubAgentPrompt, frontMatter: Record | undefined): string { - const {bodyFieldName} = this.subAgentsConfig - if (bodyFieldName == null || bodyFieldName.length === 0) { - throw new Error(`subagents.bodyFieldName is required when artifactFormat="toml" for ${this.name}`) - } - - return this.buildTomlContent({ - content: agent.content, - bodyFieldName, - ...frontMatter != null && {frontMatter}, - ...this.subAgentsConfig.fieldNameMap != null && { - fieldNameMap: this.subAgentsConfig.fieldNameMap - }, - ...this.subAgentsConfig.excludedFrontMatterFields != null && { - excludedKeys: this.subAgentsConfig.excludedFrontMatterFields - }, - ...this.subAgentsConfig.extraFields != null && { - extraFields: this.subAgentsConfig.extraFields - }, - ...this.subAgentsConfig.fieldOrder != null && { - fieldOrder: this.subAgentsConfig.fieldOrder - } - }) - } - - protected getCommandSeriesOptions(ctx: OutputWriteContext): CommandSeriesPluginOverride { - const globalOptions = ctx.pluginOptions?.commandSeriesOptions - const pluginOverride = globalOptions?.pluginOverrides?.[this.name] - - const includeSeriesPrefix = pluginOverride?.includeSeriesPrefix ?? globalOptions?.includeSeriesPrefix // Only include properties that have defined values to satisfy exactOptionalPropertyTypes // Plugin-specific overrides take precedence over global settings - const seriesSeparator = pluginOverride?.seriesSeparator - - if (includeSeriesPrefix != null && seriesSeparator != null) { - return {includeSeriesPrefix, seriesSeparator} - } // Build result object conditionally to avoid assigning undefined to readonly properties - if (includeSeriesPrefix != null) return {includeSeriesPrefix} - if (seriesSeparator != null) return {seriesSeparator} - return {} - } - - protected getTransformOptionsFromContext(ctx: OutputWriteContext, additionalOptions?: CommandNameTransformOptions): CommandNameTransformOptions { - const seriesOptions = this.getCommandSeriesOptions(ctx) - - const includeSeriesPrefix = seriesOptions.includeSeriesPrefix ?? additionalOptions?.includeSeriesPrefix // Only include properties that have defined values to satisfy exactOptionalPropertyTypes // Merge: additionalOptions (plugin defaults) <- seriesOptions (config overrides) - const seriesSeparator = seriesOptions.seriesSeparator ?? additionalOptions?.seriesSeparator - - if (includeSeriesPrefix != null && seriesSeparator != null) { - return {includeSeriesPrefix, seriesSeparator} - } // Build result object conditionally to avoid assigning undefined to readonly properties - if (includeSeriesPrefix != null) return {includeSeriesPrefix} - if (seriesSeparator != null) return {seriesSeparator} - return {} - } - - protected shouldSkipDueToPlugin(ctx: OutputWriteContext, precedingPluginName: string): boolean { - const registeredPlugins = ctx.registeredPluginNames - if (registeredPlugins == null) return false - return registeredPlugins.includes(precedingPluginName) - } - - protected getRegistryWriter>(WriterClass: new (logger: ILogger) => T): T { - const cacheKey = WriterClass.name - - const cached = this.registryWriterCache.get(cacheKey) // Check cache first - if (cached != null) return cached as T - - const writer = new WriterClass(this.log) // Create new instance and cache it - this.registryWriterCache.set(cacheKey, writer as RegistryWriter) - return writer - } - - protected async registerInRegistry( - writer: RegistryWriter, - entries: readonly TEntry[], - ctx: OutputWriteContext - ): Promise { - return writer.register(entries, ctx.dryRun) - } - - protected normalizeRuleScope(rule: RulePrompt): RuleScope { - return rule.scope ?? 'project' - } - - protected normalizeSourceScope(scope: RuleScope | undefined): OutputDeclarationScope { - if (scope === 'global' || scope === 'project') return scope - return 'project' - } - - protected remapDeclarationScope( - scope: OutputDeclarationScope, - remap?: Partial> - ): OutputDeclarationScope { - return remap?.[scope] ?? scope - } - - protected resolveCommandSourceScope(cmd: CommandPrompt): OutputDeclarationScope { - if (cmd.globalOnly === true) return 'global' - const scope = (cmd.yamlFrontMatter as {scope?: RuleScope} | undefined)?.scope - return this.remapDeclarationScope(this.normalizeSourceScope(scope), this.commandsConfig.scopeRemap) - } - - protected resolveSubAgentSourceScope(subAgent: SubAgentPrompt): OutputDeclarationScope { - const scope = (subAgent.yamlFrontMatter as {scope?: RuleScope} | undefined)?.scope - return this.remapDeclarationScope(this.normalizeSourceScope(scope), this.subAgentsConfig.scopeRemap) - } - - protected resolveSkillSourceScope(skill: SkillPrompt): OutputDeclarationScope { - const scope = (skill.yamlFrontMatter as {scope?: RuleScope} | undefined)?.scope - return this.remapDeclarationScope(this.normalizeSourceScope(scope), this.skillsConfig.scopeRemap) - } - - protected selectSingleScopeItems( - items: readonly T[], - sourceScopes: readonly OutputDeclarationScope[], - resolveScope: (item: T) => OutputDeclarationScope, - requestedScopes?: OutputScopeSelection - ): { - readonly selectedScope?: OutputDeclarationScope - readonly items: readonly T[] - } { - if (items.length === 0) return {items: []} - - const availableScopes = [...new Set(items.map(resolveScope))] - const selectedScopes = resolveTopicScopes({ - requestedScopes, - defaultScopes: sourceScopes, - supportedScopes: sourceScopes, - singleScope: true, - availableScopes - }) - const [selectedScope] = selectedScopes - if (selectedScope == null) return {items: []} - - return { - selectedScope, - items: items.filter(item => resolveScope(item) === selectedScope) - } - } - - protected selectRuleScopes(ctx: OutputWriteContext, rules: readonly RulePrompt[]): readonly OutputDeclarationScope[] { - const availableScopes = [...new Set(rules.map(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule))))] - return resolveTopicScopes({ - requestedScopes: this.getTopicScopeOverride(ctx, 'rules'), - defaultScopes: this.rulesConfig.sourceScopes ?? ['project', 'global'], - supportedScopes: this.rulesConfig.sourceScopes ?? ['project', 'global'], - singleScope: false, - availableScopes - }).filter(scope => availableScopes.includes(scope)) - } - - protected selectPromptScopes( - ctx: OutputWriteContext, - supportedScopes: readonly OutputDeclarationScope[] = ['project', 'global'], - defaultScopes: readonly OutputDeclarationScope[] = supportedScopes - ): readonly OutputDeclarationScope[] { - return resolveTopicScopes({ - requestedScopes: this.getTopicScopeOverride(ctx, 'prompt'), - defaultScopes, - supportedScopes, - singleScope: false - }) - } - - protected getTopicScopeOverride(ctx: OutputPluginContext | OutputWriteContext, topic: OutputScopeTopic): OutputScopeSelection | undefined { - return ctx.pluginOptions?.outputScopes?.plugins?.[this.name]?.[topic] - } - - protected buildSkillFrontMatter(skill: SkillPrompt, options?: SkillFrontMatterOptions): Record { - const fm = skill.yamlFrontMatter - const result: Record = { - name: this.getSkillName(skill), - description: fm.description - } - - if ('displayName' in fm && fm.displayName != null) { - // Conditionally add optional fields - result['displayName'] = fm.displayName - } - if ('keywords' in fm && fm.keywords != null && fm.keywords.length > 0) { - result['keywords'] = fm.keywords - } - if ('author' in fm && fm.author != null) result['author'] = fm.author - if ('version' in fm && fm.version != null) result['version'] = fm.version - - const includeTools = options?.includeTools ?? true // Handle tools based on options - if (includeTools && 'allowTools' in fm && fm.allowTools != null && fm.allowTools.length > 0) { - const toolFormat = options?.toolFormat ?? 'array' - result['allowTools'] = toolFormat === 'string' ? fm.allowTools.join(',') : fm.allowTools - } - - if (options?.additionalFields != null) { - // Add any additional custom fields - Object.assign(result, options.additionalFields) - } - - return result - } - - protected buildRuleContent(rule: RulePrompt, ctx?: OutputPluginContext): string { - const fmData = this.rulesConfig.transformFrontMatter ? this.rulesConfig.transformFrontMatter(rule) : {globs: rule.globs.join(', ')} - - const sanitizedFmData = fmData == null || Object.keys(fmData).length === 0 ? void 0 : fmData - - return this.buildMarkdownContent(rule.content, sanitizedFmData, ctx) - } - - protected buildRuleFileName(rule: RulePrompt): string { - const prefix = `${this.rulesConfig.prefix ?? 'rule'}${this.rulesConfig.linkSymbol ?? '-'}` - const fileName = `${prefix}${rule.prefix}${this.rulesConfig.linkSymbol ?? '-'}${rule.ruleName}${this.rulesConfig.ext ?? '.md'}` - return fileName - } - - async declareOutputFiles(ctx: OutputWriteContext): Promise { - return this.buildDefaultOutputDeclarations(ctx) - } - - async declareCleanupPaths(ctx: OutputCleanContext): Promise { - const cleanupDelete = this.buildCleanupTargetsFromScopeConfig(this.cleanupConfig.delete, 'delete', ctx) - const cleanupProtect = this.buildCleanupTargetsFromScopeConfig(this.cleanupConfig.protect, 'protect', ctx) - const {excludeScanGlobs} = this.cleanupConfig - - if (cleanupDelete.length === 0 && cleanupProtect.length === 0 && (excludeScanGlobs == null || excludeScanGlobs.length === 0)) { - return {} - } - - return { - ...cleanupDelete.length > 0 && {delete: cleanupDelete}, - ...cleanupProtect.length > 0 && {protect: cleanupProtect}, - ...excludeScanGlobs != null && excludeScanGlobs.length > 0 && {excludeScanGlobs} - } - } - - async declareWslMirrorFiles(ctx: OutputWriteContext): Promise { - void ctx - return this.wslMirrorPaths.map(sourcePath => ({sourcePath})) - } - - async convertContent(declaration: OutputFileDeclaration, ctx: OutputWriteContext): Promise { - const source = declaration.source as DeclarativeOutputSource - - switch (source.kind) { - case 'projectRootMemory': - case 'projectChildMemory': - case 'globalMemory': - case 'skillReference': - case 'ignoreFile': - return source.content - case 'command': - return this.buildCommandContent(source.command, ctx) - case 'subAgent': - return this.buildSubAgentContent(source.subAgent, ctx) - case 'skillMain': - return this.buildSkillMainContent(source.skill, ctx) - case 'skillResource': - return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content - case 'rule': - return this.buildRuleContent(source.rule, ctx) - default: - throw new Error(`Unsupported declaration source for plugin ${this.name}`) - } - } - - protected async buildDefaultOutputDeclarations(ctx: OutputWriteContext): Promise { - const declarations: OutputFileDeclaration[] = [] - const {globalMemory, commands, subAgents, skills, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext - const transformOptions = this.getTransformOptionsFromContext(ctx) - const ignoreOutputPath = this.getIgnoreOutputPath() - const ignoreFile = this.indexignore == null ? void 0 : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) - const selectedCommands - = this.commandOutputEnabled && commands != null - ? this.selectSingleScopeItems( - commands, - this.commandsConfig.sourceScopes, - cmd => this.resolveCommandSourceScope(cmd), - this.getTopicScopeOverride(ctx, 'commands') - ) - : {items: [] as readonly CommandPrompt[]} - - const selectedSubAgents - = this.subAgentOutputEnabled && subAgents != null - ? this.selectSingleScopeItems( - subAgents, - this.subAgentsConfig.sourceScopes, - subAgent => this.resolveSubAgentSourceScope(subAgent), - this.getTopicScopeOverride(ctx, 'subagents') - ) - : {items: [] as readonly SubAgentPrompt[]} - - const selectedSkills - = this.skillOutputEnabled && skills != null - ? this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'skills') - ) - : {items: [] as readonly SkillPrompt[]} - - const allRules = rules ?? [] - const activeRuleScopes = this.ruleOutputEnabled && allRules.length > 0 ? new Set(this.selectRuleScopes(ctx, allRules)) : new Set() - const activePromptScopes = new Set(this.selectPromptScopes(ctx, this.outputCapabilities.prompt?.scopes ?? ['project', 'global'])) - - const rulesByScope: Record = { - project: [], - global: [] - } - for (const rule of allRules) { - const ruleScope = this.normalizeSourceScope(this.normalizeRuleScope(rule)) - rulesByScope[ruleScope].push(rule) - } - - for (const project of this.getProjectOutputProjects(ctx)) { - const projectRootDir = this.resolveProjectRootDir(ctx, project) - const basePath = this.resolveProjectConfigDir(ctx, project) - if (projectRootDir == null || basePath == null) continue - - if (this.outputFileName.length > 0 && activePromptScopes.has('project') && this.isProjectPromptOutputTarget(project)) { - if (project.rootMemoryPrompt != null) { - declarations.push({ - path: path.join(projectRootDir, this.outputFileName), - scope: 'project', - source: { - kind: 'projectRootMemory', - content: project.rootMemoryPrompt.content as string - } - }) - } - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - declarations.push({ - path: this.resolveFullPath(child.dir), - scope: 'project', - source: { - kind: 'projectChildMemory', - content: child.content as string - } - }) - } - } - } - - const {projectConfig} = project - - if (selectedCommands.selectedScope === 'project' && selectedCommands.items.length > 0) { - const filteredCommands = filterByProjectConfig(selectedCommands.items, projectConfig, 'commands') - this.appendCommandDeclarations(declarations, basePath, 'project', filteredCommands, transformOptions) - } - - if (selectedSubAgents.selectedScope === 'project' && selectedSubAgents.items.length > 0) { - const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, projectConfig, 'subAgents') - this.appendSubAgentDeclarations(declarations, basePath, 'project', filteredSubAgents) - } - - if (selectedSkills.selectedScope === 'project' && selectedSkills.items.length > 0) { - const filteredSkills = filterByProjectConfig(selectedSkills.items, projectConfig, 'skills') - this.appendSkillDeclarations(declarations, basePath, 'project', filteredSkills) - } - - if (activeRuleScopes.has('project')) { - const projectRules = applySubSeriesGlobPrefix(filterByProjectConfig(rulesByScope.project, projectConfig, 'rules'), projectConfig) - this.appendRuleDeclarations(declarations, basePath, 'project', projectRules) - } - - if ( - ignoreOutputPath != null - && ignoreFile != null - && project.isWorkspaceRootProject !== true - && project.isPromptSourceProject !== true - && project.dirFromWorkspacePath != null - ) { - declarations.push({ - path: path.join(project.dirFromWorkspacePath.basePath, project.dirFromWorkspacePath.path, ignoreOutputPath), - scope: 'project', - source: {kind: 'ignoreFile', content: ignoreFile.content} - }) - } - } - - const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) - - if (selectedCommands.selectedScope === 'global' && selectedCommands.items.length > 0) { - const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') - const basePath = this.getGlobalConfigDir() - this.appendCommandDeclarations(declarations, basePath, 'global', filteredCommands, transformOptions) - } - - if (selectedSubAgents.selectedScope === 'global' && selectedSubAgents.items.length > 0) { - const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, promptSourceProjectConfig, 'subAgents') - const basePath = this.getGlobalConfigDir() - this.appendSubAgentDeclarations(declarations, basePath, 'global', filteredSubAgents) - } - - if (selectedSkills.selectedScope === 'global' && selectedSkills.items.length > 0) { - const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') - const basePath = this.getGlobalConfigDir() - this.appendSkillDeclarations(declarations, basePath, 'global', filteredSkills) - } - - for (const ruleScope of ['global'] as const) { - if (!activeRuleScopes.has(ruleScope)) continue - const basePath = this.getGlobalConfigDir() - const filteredRules = applySubSeriesGlobPrefix( - filterByProjectConfig(rulesByScope[ruleScope], promptSourceProjectConfig, 'rules'), - promptSourceProjectConfig - ) - this.appendRuleDeclarations(declarations, basePath, ruleScope, filteredRules) - } - - if (globalMemory != null && this.outputFileName.length > 0 && activePromptScopes.has('global')) { - declarations.push({ - path: path.join(this.getGlobalConfigDir(), this.outputFileName), - scope: 'global', - source: { - kind: 'globalMemory', - content: globalMemory.content as string - } - }) - } - - return declarations - } - - protected async buildCommandContent(cmd: CommandPrompt, ctx?: OutputPluginContext): Promise { - let compiledContent = cmd.content - let compiledFrontMatter = cmd.yamlFrontMatter - let useRecompiledFrontMatter = false - - if (cmd.rawMdxContent != null && this.toolPreset != null) { - this.log.debug('recompiling command with tool preset', { - file: cmd.dir.getAbsolutePath(), - toolPreset: this.toolPreset, - hasRawContent: true - }) - const scopeCollector = new GlobalScopeCollector({ - toolPreset: this.toolPreset - }) - const globalScope = scopeCollector.collect() - const result = await compileRawPromptArtifact({ - filePath: cmd.dir.getAbsolutePath(), - globalScope, - rawMdx: cmd.rawMdxContent - }) - compiledContent = result.content - compiledFrontMatter = result.metadata as typeof cmd.yamlFrontMatter - useRecompiledFrontMatter = true - } - - const commandFrontMatterTransformer = this.commandsConfig.transformFrontMatter - if (commandFrontMatterTransformer == null) { - throw new Error(`commands.transformFrontMatter is required for command output plugin: ${this.name}`) - } - - const transformedFrontMatter = commandFrontMatterTransformer(cmd, { - isRecompiled: useRecompiledFrontMatter, - ...compiledFrontMatter != null && { - sourceFrontMatter: compiledFrontMatter as Record - } - }) - - return this.buildMarkdownContent(compiledContent, transformedFrontMatter, ctx) - } - - protected buildSubAgentContent(agent: SubAgentPrompt, ctx?: OutputPluginContext): string { - const subAgentFrontMatterTransformer = this.subAgentsConfig.transformFrontMatter - const transformedFrontMatter = subAgentFrontMatterTransformer?.(agent, { - ...agent.yamlFrontMatter != null && { - sourceFrontMatter: agent.yamlFrontMatter as Record - } - }) - - if (this.subAgentsConfig.artifactFormat === 'toml') { - const sourceFrontMatter = transformedFrontMatter ?? agent.yamlFrontMatter - return this.buildSubAgentTomlContent(agent, sourceFrontMatter) - } - - if (transformedFrontMatter != null) { - return this.buildMarkdownContent(agent.content, transformedFrontMatter, ctx) - } - - return this.buildMarkdownContentWithRaw(agent.content, agent.yamlFrontMatter, agent.rawFrontMatter, ctx) - } - - protected buildSkillMainContent(skill: SkillPrompt, ctx?: OutputPluginContext): string { - return this.buildMarkdownContentWithRaw(skill.content as string, skill.yamlFrontMatter, skill.rawFrontMatter, ctx) - } -} diff --git a/cli/src/plugins/plugin-core/AbstractPlugin.ts b/cli/src/plugins/plugin-core/AbstractPlugin.ts deleted file mode 100644 index 24e2e323..00000000 --- a/cli/src/plugins/plugin-core/AbstractPlugin.ts +++ /dev/null @@ -1,26 +0,0 @@ -import type {ILogger} from '@truenine/logger' -import type {PluginKind} from './enums' -import type {Plugin} from './plugin' - -import {createLogger} from '@truenine/logger' - -export abstract class AbstractPlugin implements Plugin { - readonly type: T - - readonly name: string - - private _log?: ILogger - - get log(): ILogger { - this._log ??= createLogger(this.name) - return this._log - } - - readonly dependsOn?: readonly string[] - - protected constructor(name: string, type: T, dependsOn?: readonly string[]) { - this.name = name - this.type = type - if (dependsOn != null) this.dependsOn = dependsOn - } -} diff --git a/cli/src/plugins/plugin-core/AindexConfigDefaults.ts b/cli/src/plugins/plugin-core/AindexConfigDefaults.ts deleted file mode 100644 index 8a6e7480..00000000 --- a/cli/src/plugins/plugin-core/AindexConfigDefaults.ts +++ /dev/null @@ -1,123 +0,0 @@ -export interface AindexDirPairLike { - readonly src: string - readonly dist: string -} - -export const AINDEX_DEFAULT_DIR_NAME = 'aindex' - -export const AINDEX_PROJECT_SERIES_NAMES = ['app', 'ext', 'arch', 'softwares'] as const - -export type AindexProjectSeriesName = (typeof AINDEX_PROJECT_SERIES_NAMES)[number] - -export const AINDEX_CONFIG_DIRECTORY_PAIR_KEYS = [ - 'skills', - 'commands', - 'subAgents', - 'rules', - ...AINDEX_PROJECT_SERIES_NAMES -] as const - -export const AINDEX_CONFIG_FILE_PAIR_KEYS = [ - 'globalPrompt', - 'workspacePrompt' -] as const - -export const AINDEX_CONFIG_PAIR_KEYS = [ - 'skills', - 'commands', - 'subAgents', - 'rules', - 'globalPrompt', - 'workspacePrompt', - ...AINDEX_PROJECT_SERIES_NAMES -] as const - -export type AindexConfigPairKey = (typeof AINDEX_CONFIG_PAIR_KEYS)[number] -export type AindexConfigDirectoryPairKey = (typeof AINDEX_CONFIG_DIRECTORY_PAIR_KEYS)[number] -export type AindexConfigFilePairKey = (typeof AINDEX_CONFIG_FILE_PAIR_KEYS)[number] -export type AindexConfigKeyPath = `aindex.${AindexConfigPairKey}.src` | `aindex.${AindexConfigPairKey}.dist` - -export const AINDEX_PROMPT_TREE_DIRECTORY_PAIR_KEYS = [ - 'skills', - 'commands', - 'subAgents', - ...AINDEX_PROJECT_SERIES_NAMES -] as const satisfies readonly AindexConfigDirectoryPairKey[] - -export type AindexPromptTreeDirectoryPairKey = (typeof AINDEX_PROMPT_TREE_DIRECTORY_PAIR_KEYS)[number] - -interface MutableAindexDirPair { - src: string - dist: string -} - -export type AindexConfigLike = { - dir: string -} & { - [K in AindexConfigPairKey]: MutableAindexDirPair -} - -export const AINDEX_CONFIG_PAIR_DEFAULTS = { - skills: {src: 'skills', dist: 'dist/skills'}, - commands: {src: 'commands', dist: 'dist/commands'}, - subAgents: {src: 'subagents', dist: 'dist/subagents'}, - rules: {src: 'rules', dist: 'dist/rules'}, - globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, - workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, - app: {src: 'app', dist: 'dist/app'}, - ext: {src: 'ext', dist: 'dist/ext'}, - arch: {src: 'arch', dist: 'dist/arch'}, - softwares: {src: 'softwares', dist: 'dist/softwares'} -} as const satisfies Record - -function buildAindexConfigKeyPaths(): readonly AindexConfigKeyPath[] { - const paths: AindexConfigKeyPath[] = [] - for (const key of AINDEX_CONFIG_PAIR_KEYS) { - paths.push(`aindex.${key}.src`, `aindex.${key}.dist`) - } - return paths -} - -function cloneAindexConfigPairs(): {[K in AindexConfigPairKey]: MutableAindexDirPair} { - return Object.fromEntries( - AINDEX_CONFIG_PAIR_KEYS.map(key => [ - key, - { - ...AINDEX_CONFIG_PAIR_DEFAULTS[key] - } - ]) - ) as {[K in AindexConfigPairKey]: MutableAindexDirPair} -} - -export const AINDEX_CONFIG_KEY_PATHS = buildAindexConfigKeyPaths() - -export function buildDefaultAindexConfig(): AindexConfigLike { - return { - dir: AINDEX_DEFAULT_DIR_NAME, - ...cloneAindexConfigPairs() - } -} - -export function mergeAindexConfig( - base: T, - override?: Partial -): T { - if (override == null) return base - - const mergedPairs = Object.fromEntries( - AINDEX_CONFIG_PAIR_KEYS.map(key => [ - key, - { - ...base[key], - ...override[key] - } - ]) - ) as {[K in AindexConfigPairKey]: T[K]} - - return { - ...base, - ...override, - dir: override.dir ?? base.dir, - ...mergedPairs - } -} diff --git a/cli/src/plugins/plugin-core/AindexTypes.ts b/cli/src/plugins/plugin-core/AindexTypes.ts deleted file mode 100644 index 599efff5..00000000 --- a/cli/src/plugins/plugin-core/AindexTypes.ts +++ /dev/null @@ -1,367 +0,0 @@ -export { - AINDEX_PROJECT_SERIES_NAMES -} from './AindexConfigDefaults' - -export type { - AindexProjectSeriesName -} from './AindexConfigDefaults' - -/** - * Aindex directory structure types and constants - * Used for directory structure validation and generation - */ - -/** - * File entry in the aindex project - */ -export interface AindexFileEntry { - /** File name (e.g., 'GLOBAL.md') */ - readonly name: string - /** Whether this file is required */ - readonly required: boolean - /** File description */ - readonly description?: string -} - -/** - * Directory entry in the aindex project - */ -export interface AindexDirectoryEntry { - /** Directory name (e.g., 'skills') */ - readonly name: string - /** Whether this directory is required */ - readonly required: boolean - /** Directory description */ - readonly description?: string - /** Nested directories */ - readonly directories?: readonly AindexDirectoryEntry[] - /** Files in this directory */ - readonly files?: readonly AindexFileEntry[] -} - -/** - * Root structure of the aindex project - */ -export interface AindexDirectory { - /** Source directories (before compilation) */ - readonly src: { - readonly skills: AindexDirectoryEntry - readonly commands: AindexDirectoryEntry - readonly agents: AindexDirectoryEntry - readonly rules: AindexDirectoryEntry - readonly globalMemoryFile: AindexFileEntry - readonly workspaceMemoryFile: AindexFileEntry - } - /** Distribution directories (after compilation) */ - readonly dist: { - readonly skills: AindexDirectoryEntry - readonly commands: AindexDirectoryEntry - readonly agents: AindexDirectoryEntry - readonly rules: AindexDirectoryEntry - readonly app: AindexDirectoryEntry - readonly ext: AindexDirectoryEntry - readonly arch: AindexDirectoryEntry - readonly softwares: AindexDirectoryEntry - readonly globalMemoryFile: AindexFileEntry - readonly workspaceMemoryFile: AindexFileEntry - } - /** App directory (project-specific prompts source, standalone at root) */ - readonly app: AindexDirectoryEntry - readonly ext: AindexDirectoryEntry - readonly arch: AindexDirectoryEntry - readonly softwares: AindexDirectoryEntry - /** IDE configuration directories */ - readonly ide: { - readonly idea: AindexDirectoryEntry - readonly ideaCodeStyles: AindexDirectoryEntry - readonly vscode: AindexDirectoryEntry - readonly zed: AindexDirectoryEntry - } - /** IDE configuration files */ - readonly ideFiles: readonly AindexFileEntry[] - /** AI Agent ignore files */ - readonly ignoreFiles: readonly AindexFileEntry[] -} - -/** - * Directory names used in aindex project - */ -export const AINDEX_DIR_NAMES = { - SRC: 'src', - DIST: 'dist', - SKILLS: 'skills', - COMMANDS: 'commands', - AGENTS: 'agents', - RULES: 'rules', - APP: 'app', - EXT: 'ext', - ARCH: 'arch', - SOFTWARES: 'softwares', - IDEA: '.idea', // IDE directories - IDEA_CODE_STYLES: '.idea/codeStyles', - VSCODE: '.vscode', - ZED: '.zed' -} as const - -/** - * File names used in aindex project - */ -export const AINDEX_FILE_NAMES = { - GLOBAL_MEMORY: 'global.mdx', // Global memory - GLOBAL_MEMORY_SRC: 'global.src.mdx', - WORKSPACE_MEMORY: 'workspace.mdx', // Workspace memory - WORKSPACE_MEMORY_SRC: 'workspace.src.mdx', - EDITOR_CONFIG: '.editorconfig', // EditorConfig - IDEA_GITIGNORE: '.idea/.gitignore', // JetBrains IDE - IDEA_PROJECT_XML: '.idea/codeStyles/Project.xml', - IDEA_CODE_STYLE_CONFIG_XML: '.idea/codeStyles/codeStyleConfig.xml', - VSCODE_SETTINGS: '.vscode/settings.json', // VS Code - VSCODE_EXTENSIONS: '.vscode/extensions.json', - ZED_SETTINGS: '.zed/settings.json', - QODER_IGNORE: '.qoderignore', // AI Agent ignore files - CURSOR_IGNORE: '.cursorignore', - WARP_INDEX_IGNORE: '.warpindexignore', - AI_IGNORE: '.aiignore', - CODEIUM_IGNORE: '.codeiumignore' // Windsurf ignore file -} as const - -/** - * Relative paths from aindex project root - */ -export const AINDEX_RELATIVE_PATHS = { - SRC_SKILLS: 'src/skills', // Source paths - SRC_COMMANDS: 'src/commands', - SRC_AGENTS: 'src/agents', - SRC_RULES: 'src/rules', - SRC_GLOBAL_MEMORY: 'global.src.mdx', - SRC_WORKSPACE_MEMORY: 'workspace.src.mdx', - DIST_SKILLS: 'dist/skills', // Distribution paths - DIST_COMMANDS: 'dist/commands', - DIST_AGENTS: 'dist/agents', - DIST_RULES: 'dist/rules', - DIST_APP: 'dist/app', - DIST_EXT: 'dist/ext', - DIST_ARCH: 'dist/arch', - DIST_SOFTWARES: 'dist/softwares', - DIST_GLOBAL_MEMORY: 'dist/global.mdx', - DIST_WORKSPACE_MEMORY: 'dist/workspace.mdx', - APP: 'app', // App source path (standalone at root) - EXT: 'ext', - ARCH: 'arch', - SOFTWARES: 'softwares' -} as const - -/** - * Default aindex directory structure - * Used for validation and generation - */ -export const DEFAULT_AINDEX_STRUCTURE: AindexDirectory = { - src: { - skills: { - name: AINDEX_DIR_NAMES.SKILLS, - required: false, - description: 'Skill source files (.src.mdx)' - }, - commands: { - name: AINDEX_DIR_NAMES.COMMANDS, - required: false, - description: 'Fast command source files (.src.mdx)' - }, - agents: { - name: AINDEX_DIR_NAMES.AGENTS, - required: false, - description: 'Sub-agent source files (.src.mdx)' - }, - rules: { - name: AINDEX_DIR_NAMES.RULES, - required: false, - description: 'Rule source files (.src.mdx)' - }, - globalMemoryFile: { - name: AINDEX_FILE_NAMES.GLOBAL_MEMORY_SRC, - required: false, - description: 'Global memory source file' - }, - workspaceMemoryFile: { - name: AINDEX_FILE_NAMES.WORKSPACE_MEMORY_SRC, - required: false, - description: 'Workspace memory source file' - } - }, - dist: { - skills: { - name: AINDEX_DIR_NAMES.SKILLS, - required: false, - description: 'Compiled skill files (.mdx)' - }, - commands: { - name: AINDEX_DIR_NAMES.COMMANDS, - required: false, - description: 'Compiled fast command files (.mdx)' - }, - agents: { - name: AINDEX_DIR_NAMES.AGENTS, - required: false, - description: 'Compiled sub-agent files (.mdx)' - }, - rules: { - name: AINDEX_DIR_NAMES.RULES, - required: false, - description: 'Compiled rule files (.mdx)' - }, - globalMemoryFile: { - name: AINDEX_FILE_NAMES.GLOBAL_MEMORY, - required: false, - description: 'Compiled global memory file' - }, - workspaceMemoryFile: { - name: AINDEX_FILE_NAMES.WORKSPACE_MEMORY, - required: false, - description: 'Compiled workspace memory file' - }, - app: { - name: AINDEX_DIR_NAMES.APP, - required: false, - description: 'Compiled project-specific prompts' - }, - ext: { - name: AINDEX_DIR_NAMES.EXT, - required: false, - description: 'Compiled extension-specific prompts' - }, - arch: { - name: AINDEX_DIR_NAMES.ARCH, - required: false, - description: 'Compiled architecture-specific prompts' - }, - softwares: { - name: AINDEX_DIR_NAMES.SOFTWARES, - required: false, - description: 'Compiled software-repository prompts' - } - }, - app: { - name: AINDEX_DIR_NAMES.APP, - required: false, - description: 'Project-specific prompts (standalone directory)' - }, - ext: { - name: AINDEX_DIR_NAMES.EXT, - required: false, - description: 'Extension-specific prompts (standalone directory)' - }, - arch: { - name: AINDEX_DIR_NAMES.ARCH, - required: false, - description: 'Architecture-specific prompts (standalone directory)' - }, - softwares: { - name: AINDEX_DIR_NAMES.SOFTWARES, - required: false, - description: 'Software-repository prompts (standalone directory)' - }, - ide: { - idea: { - name: AINDEX_DIR_NAMES.IDEA, - required: false, - description: 'JetBrains IDE configuration directory' - }, - ideaCodeStyles: { - name: AINDEX_DIR_NAMES.IDEA_CODE_STYLES, - required: false, - description: 'JetBrains IDE code styles directory' - }, - vscode: { - name: AINDEX_DIR_NAMES.VSCODE, - required: false, - description: 'VS Code configuration directory' - }, - zed: { - name: AINDEX_DIR_NAMES.ZED, - required: false, - description: 'Zed configuration directory' - } - }, - ideFiles: [ - { - name: AINDEX_FILE_NAMES.EDITOR_CONFIG, - required: false, - description: 'EditorConfig file' - }, - { - name: AINDEX_FILE_NAMES.IDEA_GITIGNORE, - required: false, - description: 'JetBrains IDE .gitignore' - }, - { - name: AINDEX_FILE_NAMES.IDEA_PROJECT_XML, - required: false, - description: 'JetBrains IDE Project.xml' - }, - { - name: AINDEX_FILE_NAMES.IDEA_CODE_STYLE_CONFIG_XML, - required: false, - description: 'JetBrains IDE codeStyleConfig.xml' - }, - { - name: AINDEX_FILE_NAMES.VSCODE_SETTINGS, - required: false, - description: 'VS Code settings.json' - }, - { - name: AINDEX_FILE_NAMES.VSCODE_EXTENSIONS, - required: false, - description: 'VS Code extensions.json' - }, - { - name: AINDEX_FILE_NAMES.ZED_SETTINGS, - required: false, - description: 'Zed settings.json' - } - ], - ignoreFiles: [ - { - name: AINDEX_FILE_NAMES.QODER_IGNORE, - required: false, - description: 'Qoder ignore file' - }, - { - name: AINDEX_FILE_NAMES.CURSOR_IGNORE, - required: false, - description: 'Cursor ignore file' - }, - { - name: AINDEX_FILE_NAMES.WARP_INDEX_IGNORE, - required: false, - description: 'Warp index ignore file' - }, - { - name: AINDEX_FILE_NAMES.AI_IGNORE, - required: false, - description: 'AI ignore file' - }, - { - name: AINDEX_FILE_NAMES.CODEIUM_IGNORE, - required: false, - description: 'Windsurf ignore file' - } - ] -} as const - -/** - * Type for directory names - */ -export type AindexDirName - = (typeof AINDEX_DIR_NAMES)[keyof typeof AINDEX_DIR_NAMES] - -/** - * Type for file names - */ -export type AindexFileName - = (typeof AINDEX_FILE_NAMES)[keyof typeof AINDEX_FILE_NAMES] - -/** - * Type for relative paths - */ -export type AindexRelativePath - = (typeof AINDEX_RELATIVE_PATHS)[keyof typeof AINDEX_RELATIVE_PATHS] diff --git a/cli/src/plugins/plugin-core/ConfigTypes.schema.ts b/cli/src/plugins/plugin-core/ConfigTypes.schema.ts deleted file mode 100644 index 3bf047ad..00000000 --- a/cli/src/plugins/plugin-core/ConfigTypes.schema.ts +++ /dev/null @@ -1,188 +0,0 @@ -import {z} from 'zod/v3' -import { - AINDEX_CONFIG_PAIR_DEFAULTS, - AINDEX_CONFIG_PAIR_KEYS, - AINDEX_DEFAULT_DIR_NAME -} from './AindexConfigDefaults' - -/** - * Zod schema for a source/dist path pair. - * Both paths are relative to the aindex project root. - */ -export const ZAindexDirPair = z.object({src: z.string(), dist: z.string()}) - -const AINDEX_CONFIG_PAIR_SCHEMAS = Object.fromEntries( - AINDEX_CONFIG_PAIR_KEYS.map(key => [ - key, - key === 'softwares' - ? ZAindexDirPair.default(AINDEX_CONFIG_PAIR_DEFAULTS[key]) - : ZAindexDirPair - ]) -) as Record< - (typeof AINDEX_CONFIG_PAIR_KEYS)[number], - typeof ZAindexDirPair | z.ZodDefault -> - -/** - * Zod schema for the aindex configuration. - * All paths are relative to /. - */ -export const ZAindexConfig = z.object({ - dir: z.string().default(AINDEX_DEFAULT_DIR_NAME), - ...AINDEX_CONFIG_PAIR_SCHEMAS -}) - -/** - * Zod schema for per-plugin command series override options. - */ -export const ZCommandSeriesPluginOverride = z.object({ - includeSeriesPrefix: z.boolean().optional(), - seriesSeparator: z.string().optional() -}) - -/** - * Zod schema for command series configuration options. - */ -export const ZCommandSeriesOptions = z.object({ - includeSeriesPrefix: z.boolean().optional(), - pluginOverrides: z.record(z.string(), ZCommandSeriesPluginOverride).optional() -}) - -/** - * Zod schema for output scope value. - */ -export const ZOutputScope = z.enum(['project', 'global']) - -/** - * Zod schema for selecting one or more scopes. - */ -export const ZOutputScopeSelection = z.union([ZOutputScope, z.array(ZOutputScope).min(1)]) - -/** - * Zod schema for per-plugin topic scope overrides. - */ -export const ZPluginOutputScopeTopics = z.object({ - prompt: ZOutputScopeSelection.optional(), - rules: ZOutputScopeSelection.optional(), - commands: ZOutputScopeSelection.optional(), - subagents: ZOutputScopeSelection.optional(), - skills: ZOutputScopeSelection.optional(), - mcp: ZOutputScopeSelection.optional() -}) - -/** - * Zod schema for output scope override configuration. - */ -export const ZOutputScopeOptions = z.object({plugins: z.record(z.string(), ZPluginOutputScopeTopics).optional()}) - -/** - * Zod schema for shared front matter formatting options. - */ -export const ZFrontMatterOptions = z.object({blankLineAfter: z.boolean().optional()}) - -export const ZProtectionMode = z.enum(['direct', 'recursive']) -export const ZProtectionRuleMatcher = z.enum(['path', 'glob']) - -export const ZCleanupProtectionRule = z.object({ - path: z.string(), - protectionMode: ZProtectionMode, - matcher: ZProtectionRuleMatcher.optional(), - reason: z.string().optional() -}) - -export const ZCleanupProtectionOptions = z.object({rules: z.array(ZCleanupProtectionRule).optional()}) -export const ZStringOrStringArray = z.union([z.string(), z.array(z.string()).min(1)]) -export const ZWindowsWsl2Options = z.object({ - instances: ZStringOrStringArray.optional() -}) -export const ZWindowsOptions = z.object({ - wsl2: ZWindowsWsl2Options.optional() -}) - -/** - * Zod schema for user profile information. - */ -export const ZUserProfile = z.object({ - name: z.string().optional(), - username: z.string().optional(), - gender: z.string().optional(), - birthday: z.string().optional() -}).catchall(z.unknown()) - -/** - * Zod schema for the user configuration file (.tnmsc.json). - */ -export const ZUserConfigFile = z.object({ - version: z.string().optional(), - workspaceDir: z.string().optional(), - aindex: ZAindexConfig.optional(), - logLevel: z.enum(['trace', 'debug', 'info', 'warn', 'error']).optional(), - commandSeriesOptions: ZCommandSeriesOptions.optional(), - outputScopes: ZOutputScopeOptions.optional(), - frontMatter: ZFrontMatterOptions.optional(), - cleanupProtection: ZCleanupProtectionOptions.optional(), - windows: ZWindowsOptions.optional(), - profile: ZUserProfile.optional() -}) - -/** - * Zod schema for MCP project config. - */ -export const ZMcpProjectConfig = z.object({names: z.array(z.string()).optional()}) - -/** - * Zod schema for per-type series filtering configuration. - */ -export const ZTypeSeriesConfig = z.object({ - includeSeries: z.array(z.string()).optional(), - subSeries: z.record(z.string(), z.array(z.string())).optional() -}) - -/** - * Zod schema for project config. - */ -export const ZProjectConfig = z.object({ - mcp: ZMcpProjectConfig.optional(), - includeSeries: z.array(z.string()).optional(), - subSeries: z.record(z.string(), z.array(z.string())).optional(), - rules: ZTypeSeriesConfig.optional(), - skills: ZTypeSeriesConfig.optional(), - subAgents: ZTypeSeriesConfig.optional(), - commands: ZTypeSeriesConfig.optional() -}) - -/** - * Zod schema for ConfigLoader options. - */ -export const ZConfigLoaderOptions = z.object({}) - -export type AindexDirPair = z.infer -export type AindexConfig = z.infer -export type CommandSeriesPluginOverride = z.infer -export type CommandSeriesOptions = z.infer -export type OutputScope = z.infer -export type OutputScopeSelection = z.infer -export type PluginOutputScopeTopics = z.infer -export type OutputScopeOptions = z.infer -export type FrontMatterOptions = z.infer -export type ProtectionMode = z.infer -export type ProtectionRuleMatcher = z.infer -export type CleanupProtectionRule = z.infer -export type CleanupProtectionOptions = z.infer -export type StringOrStringArray = z.infer -export type WindowsWsl2Options = z.infer -export type WindowsOptions = z.infer -export type UserConfigFile = z.infer -export type McpProjectConfig = z.infer -export type TypeSeriesConfig = z.infer -export type ProjectConfig = z.infer -export type ConfigLoaderOptions = z.infer - -/** - * Result of loading a config file. - */ -export interface ConfigLoadResult { - readonly config: UserConfigFile - readonly source: string | null - readonly found: boolean -} diff --git a/cli/src/plugins/plugin-core/DistPromptGuards.test.ts b/cli/src/plugins/plugin-core/DistPromptGuards.test.ts deleted file mode 100644 index 021a392d..00000000 --- a/cli/src/plugins/plugin-core/DistPromptGuards.test.ts +++ /dev/null @@ -1,22 +0,0 @@ -import {describe, expect, it} from 'vitest' -import {assertNoResidualModuleSyntax} from './DistPromptGuards' - -describe('dist prompt guards', () => { - it('allows ordinary markdown content', () => { - expect(() => assertNoResidualModuleSyntax('# Title\n\nBody text', '/tmp/demo.mdx')).not.toThrow() - }) - - it('rejects bare module syntax outside fenced code blocks', () => { - expect(() => assertNoResidualModuleSyntax('export default\n\n# Title', '/tmp/demo.mdx')).toThrow( - 'Compiled prompt still contains residual module syntax' - ) - }) - - it('ignores module syntax inside fenced code blocks', () => { - expect(() => assertNoResidualModuleSyntax([ - '```ts', - 'export default {name: "demo"}', - '```' - ].join('\n'), '/tmp/demo.mdx')).not.toThrow() - }) -}) diff --git a/cli/src/plugins/plugin-core/DistPromptGuards.ts b/cli/src/plugins/plugin-core/DistPromptGuards.ts deleted file mode 100644 index f0f57239..00000000 --- a/cli/src/plugins/plugin-core/DistPromptGuards.ts +++ /dev/null @@ -1,68 +0,0 @@ -export interface MissingCompiledPromptErrorOptions { - readonly kind: string - readonly name: string - readonly sourcePath?: string - readonly expectedDistPath: string -} - -export class MissingCompiledPromptError extends Error { - readonly kind: string - - readonly nameOfPrompt: string - - readonly sourcePath?: string - - readonly expectedDistPath: string - - constructor(options: MissingCompiledPromptErrorOptions) { - const {kind, name, sourcePath, expectedDistPath} = options - super([ - `Missing compiled dist prompt for ${kind} "${name}".`, - ...sourcePath != null ? [`source: ${sourcePath}`] : [], - `expected dist: ${expectedDistPath}` - ].join(' ')) - this.name = 'MissingCompiledPromptError' - this.kind = kind - this.nameOfPrompt = name - if (sourcePath != null) this.sourcePath = sourcePath - this.expectedDistPath = expectedDistPath - } -} - -export class ResidualModuleSyntaxError extends Error { - readonly filePath: string - - readonly lineNumber: number - - constructor(filePath: string, lineNumber: number, lineContent: string) { - super(`Compiled prompt still contains residual module syntax at ${filePath}:${lineNumber}: ${lineContent.trim()}`) - this.name = 'ResidualModuleSyntaxError' - this.filePath = filePath - this.lineNumber = lineNumber - } -} - -const CODE_FENCE_PATTERN = /^\s*(```|~~~)/u -const RESIDUAL_MODULE_SYNTAX_PATTERNS = [ - /^\s*export\s+default\b/u, - /^\s*export\s+const\b/u, - /^\s*import\b/u -] - -export function assertNoResidualModuleSyntax(content: string, filePath: string): void { - let activeFence: string | undefined - const lines = content.split(/\r?\n/u) - - for (const [index, line] of lines.entries()) { - const fenceMatch = CODE_FENCE_PATTERN.exec(line) - if (fenceMatch?.[1] != null) { - const marker = fenceMatch[1] - if (activeFence == null) activeFence = marker - else if (activeFence === marker) activeFence = void 0 - continue - } - - if (activeFence != null) continue - if (RESIDUAL_MODULE_SYNTAX_PATTERNS.some(pattern => pattern.test(line))) throw new ResidualModuleSyntaxError(filePath, index + 1, line) - } -} diff --git a/cli/src/plugins/plugin-core/ExportMetadataTypes.ts b/cli/src/plugins/plugin-core/ExportMetadataTypes.ts deleted file mode 100644 index fb969539..00000000 --- a/cli/src/plugins/plugin-core/ExportMetadataTypes.ts +++ /dev/null @@ -1,278 +0,0 @@ -/** - * Export metadata types for MDX files - * These interfaces define the expected structure of export statements in MDX files - * that are used as front matter metadata. - * - * @module ExportMetadataTypes - */ - -import type {CodingAgentTools, NamingCaseKind, RuleScope} from './enums' -import type {SeriName} from './PromptTypes' - -/** - * Base export metadata interface - * All export metadata types should extend this - */ -export interface BaseExportMetadata { - readonly namingCase?: NamingCaseKind -} - -export interface SkillExportMetadata extends BaseExportMetadata { - readonly name?: string - readonly description: string - readonly keywords?: readonly string[] - readonly enabled?: boolean - readonly displayName?: string - readonly author?: string - readonly version?: string - readonly allowTools?: readonly (CodingAgentTools | string)[] - readonly seriName?: SeriName - readonly scope?: RuleScope -} - -export interface CommandExportMetadata extends BaseExportMetadata { - readonly description?: string - readonly argumentHint?: string - readonly allowTools?: readonly (CodingAgentTools | string)[] - readonly globalOnly?: boolean - readonly seriName?: SeriName - readonly scope?: RuleScope -} - -export interface RuleExportMetadata extends BaseExportMetadata { - readonly globs: readonly string[] - readonly description: string - readonly scope?: RuleScope - readonly seriName?: SeriName -} - -export interface SubAgentExportMetadata extends BaseExportMetadata { - readonly description: string - readonly role?: string - readonly model?: string - readonly color?: string - readonly argumentHint?: string - readonly allowTools?: readonly (CodingAgentTools | string)[] - readonly seriName?: SeriName - readonly scope?: RuleScope -} - -/** - * Metadata validation result - */ -export interface MetadataValidationResult { - readonly valid: boolean - readonly errors: readonly string[] - readonly warnings: readonly string[] -} - -/** - * Options for metadata validation - */ -export interface ValidateMetadataOptions { - readonly requiredFields: readonly (keyof T)[] - readonly optionalDefaults?: Partial - readonly filePath?: string | undefined -} - -function validateSupportedScope( - scope: unknown, - filePath?: string -): MetadataValidationResult { - const prefix = filePath != null ? ` in ${filePath}` : '' - - if (scope == null) { - return { - valid: true, - errors: [], - warnings: [] - } - } - - if (scope === 'project' || scope === 'global') { - return { - valid: true, - errors: [], - warnings: [] - } - } - - return { - valid: false, - errors: [`Field "scope" must be "project" or "global"${prefix}`], - warnings: [] - } -} - -export function validateExportMetadata( - metadata: Record, - options: ValidateMetadataOptions -): MetadataValidationResult { - const {requiredFields, optionalDefaults, filePath} = options - const errors: string[] = [] - const warnings: string[] = [] - - for (const field of requiredFields) { // Check required fields - const fieldName = String(field) - if (!(fieldName in metadata) || metadata[fieldName] == null) { - const errorMsg = filePath != null - ? `Missing required field "${fieldName}" in ${filePath}` - : `Missing required field "${fieldName}"` - errors.push(errorMsg) - } - } - - if (optionalDefaults != null) { // Check optional fields and record warnings for defaults - for (const [key, defaultValue] of Object.entries(optionalDefaults)) { - if (!(key in metadata) || metadata[key] == null) { - const warningMsg = filePath != null - ? `Using default value for optional field "${key}": ${JSON.stringify(defaultValue)} in ${filePath}` - : `Using default value for optional field "${key}": ${JSON.stringify(defaultValue)}` - warnings.push(warningMsg) - } - } - } - - return { - valid: errors.length === 0, - errors, - warnings - } -} - -/** - * Validate skill export metadata - * - * @param metadata - The metadata object to validate - * @param filePath - Optional file path for error messages - * @returns Validation result - */ -export function validateSkillMetadata( - metadata: Record, - filePath?: string -): MetadataValidationResult { - const prefix = filePath != null ? ` in ${filePath}` : '' - const errors: string[] = [] - const warnings: string[] = [] - - if (!('description' in metadata) || metadata['description'] == null) { // Check description field - must exist and not be empty - errors.push(`Missing required field "description"${prefix}`) - } else if (typeof metadata['description'] !== 'string' || metadata['description'].trim().length === 0) { - errors.push(`Required field "description" cannot be empty${prefix}`) - } - - if (metadata['enabled'] == null) { // Optional fields with defaults - warnings.push(`Using default value for optional field "enabled": true${prefix}`) - } - if (metadata['keywords'] == null) warnings.push(`Using default value for optional field "keywords": []${prefix}`) - - const scopeValidation = validateSupportedScope(metadata['scope'], filePath) - errors.push(...scopeValidation.errors) - - return { - valid: errors.length === 0, - errors, - warnings - } -} - -/** - * Validate fast command export metadata - * - * @param metadata - The metadata object to validate - * @param filePath - Optional file path for error messages - * @returns Validation result - */ -export function validateCommandMetadata( - metadata: Record, - filePath?: string -): MetadataValidationResult { - const result = validateExportMetadata(metadata, { // description is optional (can come from YAML or be omitted) // Command has no required fields from export metadata - requiredFields: [], - optionalDefaults: {}, - filePath - }) - const scopeValidation = validateSupportedScope(metadata['scope'], filePath) - - return { - valid: result.valid && scopeValidation.valid, - errors: [...result.errors, ...scopeValidation.errors], - warnings: result.warnings - } -} - -/** - * Validate sub-agent export metadata - * - * @param metadata - The metadata object to validate - * @param filePath - Optional file path for error messages - * @returns Validation result - */ -export function validateSubAgentMetadata( - metadata: Record, - filePath?: string -): MetadataValidationResult { - const result = validateExportMetadata(metadata, { - requiredFields: ['description'], - optionalDefaults: {}, - filePath - }) - const scopeValidation = validateSupportedScope(metadata['scope'], filePath) - - return { - valid: result.valid && scopeValidation.valid, - errors: [...result.errors, ...scopeValidation.errors], - warnings: result.warnings - } -} - -/** - * Validate rule export metadata - * - * @param metadata - The metadata object to validate - * @param filePath - Optional file path for error messages - * @returns Validation result - */ -export function validateRuleMetadata( - metadata: Record, - filePath?: string -): MetadataValidationResult { - const errors: string[] = [] - const warnings: string[] = [] - const prefix = filePath != null ? ` in ${filePath}` : '' - - if (!Array.isArray(metadata['globs']) || metadata['globs'].length === 0) errors.push(`Missing or empty required field "globs"${prefix}`) - else if (!metadata['globs'].every((g: unknown) => typeof g === 'string')) errors.push(`Field "globs" must be an array of strings${prefix}`) - - if (typeof metadata['description'] !== 'string' || metadata['description'].length === 0) errors.push(`Missing or empty required field "description"${prefix}`) - - const {scope, seriName} = metadata - const scopeValidation = validateSupportedScope(scope, filePath) - errors.push(...scopeValidation.errors) - - if (scope == null) warnings.push(`Using default value for optional field "scope": "project"${prefix}`) - - if (seriName != null && typeof seriName !== 'string' && !Array.isArray(seriName)) errors.push(`Field "seriName" must be a string or string array${prefix}`) - - return {valid: errors.length === 0, errors, warnings} -} - -/** - * Apply default values to metadata - * - * @param metadata - The metadata object - * @param defaults - Default values to apply - * @returns Metadata with defaults applied - */ -export function applyMetadataDefaults( - metadata: Record, - defaults: Partial -): T { - const result = {...metadata} - - for (const [key, defaultValue] of Object.entries(defaults)) { - if (!(key in result) || result[key] == null) result[key] = defaultValue - } - - return result as T -} diff --git a/cli/src/plugins/plugin-core/GlobalScopeCollector.ts b/cli/src/plugins/plugin-core/GlobalScopeCollector.ts deleted file mode 100644 index 2e6157b8..00000000 --- a/cli/src/plugins/plugin-core/GlobalScopeCollector.ts +++ /dev/null @@ -1,231 +0,0 @@ -import type {EvaluationScope} from '@truenine/md-compiler' -import type {EnvironmentContext, MdComponent, MdxGlobalScope, OsInfo, ToolReferences, UserProfile} from '@truenine/md-compiler/globals' // Collects and manages global scope variables for MDX expression evaluation. // src/scope/GlobalScopeCollector.ts -import type {UserConfigFile} from './types' -import * as os from 'node:os' -import process from 'node:process' -import {OsKind, ShellKind, ToolPresets} from '@truenine/md-compiler/globals' -import {getEffectiveHomeDir} from '@/runtime-environment' - -/** - * Tool preset names supported by GlobalScopeCollector - */ -export type ToolPresetName = keyof typeof ToolPresets - -/** - * Options for GlobalScopeCollector - */ -export interface GlobalScopeCollectorOptions { - /** User configuration file */ - readonly userConfig?: UserConfigFile | undefined - /** Tool preset to use (default: 'default') */ - readonly toolPreset?: ToolPresetName | undefined -} - -/** - * Collects global scope variables from system, environment, and user configuration. - * The collected scope is available in MDX templates via expressions like {os.platform}, {env.NODE_ENV}, etc. - */ -export class GlobalScopeCollector { - private readonly userConfig: UserConfigFile | undefined - private readonly toolPreset: ToolPresetName - - constructor(options: GlobalScopeCollectorOptions = {}) { - this.userConfig = options.userConfig - this.toolPreset = options.toolPreset ?? 'default' - } - - collect(): MdxGlobalScope { - return { - os: this.collectOsInfo(), - env: this.collectEnvContext(), - profile: this.collectProfile(), - tool: this.collectToolReferences(), - Md: this.createMdComponent() - } - } - - private collectOsInfo(): OsInfo { - const platform = os.platform() - return { - platform, - arch: os.arch(), - hostname: os.hostname(), - homedir: getEffectiveHomeDir(), - tmpdir: os.tmpdir(), - type: os.type(), - release: os.release(), - shellKind: this.detectShellKind(), - kind: this.detectOsKind(platform) - } - } - - private detectOsKind(platform: string): OsKind { - switch (platform) { - case 'win32': return OsKind.Win - case 'darwin': return OsKind.Mac - case 'linux': - case 'freebsd': - case 'openbsd': - case 'sunos': - case 'aix': return OsKind.Linux - default: return OsKind.Unknown - } - } - - private detectShellKind(): ShellKind { - const shell = process.env['SHELL'] ?? process.env['ComSpec'] ?? '' - const s = shell.toLowerCase() - - if (s.includes('bash')) return ShellKind.Bash - if (s.includes('zsh')) return ShellKind.Zsh - if (s.includes('fish')) return ShellKind.Fish - if (s.includes('pwsh')) return ShellKind.Pwsh - if (s.includes('powershell')) return ShellKind.PowerShell - if (s.includes('cmd')) return ShellKind.Cmd - if (s.endsWith('/sh')) return ShellKind.Sh - - return ShellKind.Unknown - } - - private collectEnvContext(): EnvironmentContext { - return {...process.env} - } - - private collectProfile(): UserProfile { - if (this.userConfig?.profile != null) return this.userConfig.profile as UserProfile - return {} - } - - private collectToolReferences(): ToolReferences { - const defaults: ToolReferences = {...ToolPresets.default} - if (this.toolPreset === 'claudeCode') return {...defaults, ...ToolPresets.claudeCode} - if (this.toolPreset === 'kiro') return {...defaults, ...ToolPresets.kiro} - return defaults - } - - private createMdComponent(): MdComponent { - const mdComponent = ((props: {when?: boolean, children?: unknown}) => { - if (props.when === false) return null - return props.children - }) as MdComponent - - mdComponent.Line = (props: {when?: boolean, children?: unknown}) => { - if (props.when === false) return null - return props.children - } - - return mdComponent - } -} - -/** - * Represents a single scope registration - */ -export interface ScopeRegistration { - readonly namespace: string - readonly values: Record - readonly priority: number -} - -/** - * Priority levels for scope sources. - * Higher values take precedence over lower values during merge. - */ -export enum ScopePriority { - /** System default values (os, default tool) */ - SystemDefault = 0, - /** Values from configuration file (profile, custom tool) */ - UserConfig = 10, - /** Values registered by plugins */ - PluginRegistered = 20, - /** Values passed at MDX compile time */ - CompileTime = 30 -} - -/** - * Registry for managing and merging scopes from multiple sources. - * Handles priority-based resolution when the same key exists in multiple sources. - */ -export class ScopeRegistry { - private readonly registrations: ScopeRegistration[] = [] - private globalScope: MdxGlobalScope | null = null - - setGlobalScope(scope: MdxGlobalScope): void { - this.globalScope = scope - } - - getGlobalScope(): MdxGlobalScope | null { - return this.globalScope - } - - register( - namespace: string, - values: Record, - priority: ScopePriority = ScopePriority.PluginRegistered - ): void { - this.registrations.push({namespace, values, priority}) - } - - getRegistrations(): readonly ScopeRegistration[] { - return this.registrations - } - - merge(compileTimeScope?: EvaluationScope): EvaluationScope { - const result: EvaluationScope = {} - - if (this.globalScope != null) { // 1. First add global scope (lowest priority) - result['os'] = {...this.globalScope.os} - result['env'] = {...this.globalScope.env} - result['profile'] = {...this.globalScope.profile} - result['tool'] = {...this.globalScope.tool} - } - - const sorted = [...this.registrations].sort((a, b) => a.priority - b.priority) // 2. Sort by priority and merge registered scopes - for (const reg of sorted) result[reg.namespace] = this.deepMerge(result[reg.namespace] as Record | undefined, reg.values) - - if (compileTimeScope != null) { // 3. Finally merge compile-time scope (highest priority) - for (const [key, value] of Object.entries(compileTimeScope)) { - result[key] = typeof value === 'object' && value !== null && !Array.isArray(value) - ? this.deepMerge(result[key] as Record | undefined, value as Record) - : value - } - } - - return result - } - - private deepMerge( - target: Record | undefined, - source: Record - ): Record { - if (target == null) return {...source} - - const result = {...target} - for (const [key, value] of Object.entries(source)) { - result[key] = typeof value === 'object' - && value !== null - && !Array.isArray(value) - && typeof result[key] === 'object' - && result[key] !== null - && !Array.isArray(result[key]) - ? this.deepMerge(result[key] as Record, value as Record) - : value - } - return result - } - - resolve(expression: string): string { - const scope = this.merge() - return expression.replaceAll(/\$\{([^}]+)\}/g, (_, key: string) => { - const parts = key.split('.') - let value: unknown = scope - for (const part of parts) value = (value as Record)?.[part] - return value != null ? String(value) : `\${${key}}` - }) - } - - clear(): void { - this.registrations.length = 0 - this.globalScope = null - } -} diff --git a/cli/src/plugins/plugin-core/InputTypes.ts b/cli/src/plugins/plugin-core/InputTypes.ts deleted file mode 100644 index e3785c2b..00000000 --- a/cli/src/plugins/plugin-core/InputTypes.ts +++ /dev/null @@ -1,418 +0,0 @@ -import type {AindexProjectSeriesName} from './AindexTypes' -import type {ProjectConfig} from './ConfigTypes.schema' -import type {FilePathKind, IDEKind, PromptKind, RuleScope} from './enums' -import type { - CommandYAMLFrontMatter, - FileContent, - GlobalMemoryPrompt, - Path, - ProjectChildrenMemoryPrompt, - ProjectRootMemoryPrompt, - Prompt, - RelativePath, - RuleYAMLFrontMatter, - SeriName, - SkillYAMLFrontMatter, - SubAgentYAMLFrontMatter -} from './PromptTypes' - -export interface Project { - readonly name?: string - readonly dirFromWorkspacePath?: RelativePath - readonly rootMemoryPrompt?: ProjectRootMemoryPrompt - readonly childMemoryPrompts?: readonly ProjectChildrenMemoryPrompt[] - readonly isPromptSourceProject?: boolean - readonly isWorkspaceRootProject?: boolean - readonly projectConfig?: ProjectConfig - readonly promptSeries?: AindexProjectSeriesName -} - -export interface Workspace { - readonly directory: Path - readonly projects: Project[] -} - -/** - * IDE configuration file - */ -export interface ProjectIDEConfigFile< - I extends IDEKind = IDEKind.Original -> extends FileContent { - readonly type: I -} - -/** - * AI Agent ignore configuration file - */ -export interface AIAgentIgnoreConfigFile { - readonly fileName: string - readonly content: string - readonly sourcePath?: string -} - -/** - * Shared context fields across input aggregation and output execution. - */ -interface CollectedContextData { - readonly workspace: Workspace - - /** Flat prompt projections used by current output plugins */ - readonly skills?: readonly SkillPrompt[] - readonly commands?: readonly CommandPrompt[] - readonly subAgents?: readonly SubAgentPrompt[] - readonly rules?: readonly RulePrompt[] - readonly readmePrompts?: readonly ReadmePrompt[] - readonly globalMemory?: GlobalMemoryPrompt - - /** Other non-prompt fields */ - readonly vscodeConfigFiles?: readonly ProjectIDEConfigFile[] - readonly zedConfigFiles?: readonly ProjectIDEConfigFile[] - readonly jetbrainsConfigFiles?: readonly ProjectIDEConfigFile[] - readonly editorConfigFiles?: readonly ProjectIDEConfigFile[] - readonly aiAgentIgnoreConfigFiles?: readonly AIAgentIgnoreConfigFile[] - readonly globalGitIgnore?: string - readonly shadowGitExclude?: string - readonly aindexDir?: string -} - -/** - * Input-side collected context. - * Built incrementally by input plugins through dependency-aware merging. - */ -export interface InputCollectedContext extends CollectedContextData {} - -/** - * Output-side collected context. - * Produced once from input context and consumed by output plugins only. - */ -export interface OutputCollectedContext extends CollectedContextData {} - -/** - * Convert input context to output context boundary object. - * This keeps input and output stages decoupled while preserving data shape. - */ -export function toOutputCollectedContext( - input: InputCollectedContext -): OutputCollectedContext { - return { - workspace: { - directory: input.workspace.directory, - projects: [...input.workspace.projects] - }, - ...input.skills != null && {skills: [...input.skills]}, - ...input.commands != null && {commands: [...input.commands]}, - ...input.subAgents != null && {subAgents: [...input.subAgents]}, - ...input.rules != null && {rules: [...input.rules]}, - ...input.readmePrompts != null && { - readmePrompts: [...input.readmePrompts] - }, - ...input.globalMemory != null && {globalMemory: input.globalMemory}, - ...input.vscodeConfigFiles != null && { - vscodeConfigFiles: [...input.vscodeConfigFiles] - }, - ...input.zedConfigFiles != null && { - zedConfigFiles: [...input.zedConfigFiles] - }, - ...input.jetbrainsConfigFiles != null && { - jetbrainsConfigFiles: [...input.jetbrainsConfigFiles] - }, - ...input.editorConfigFiles != null && { - editorConfigFiles: [...input.editorConfigFiles] - }, - ...input.aiAgentIgnoreConfigFiles != null && { - aiAgentIgnoreConfigFiles: [...input.aiAgentIgnoreConfigFiles] - }, - ...input.globalGitIgnore != null && { - globalGitIgnore: input.globalGitIgnore - }, - ...input.shadowGitExclude != null && { - shadowGitExclude: input.shadowGitExclude - }, - ...input.aindexDir != null && {aindexDir: input.aindexDir} - } -} - -/** - * Rule prompt with glob patterns for file-scoped rule application - */ -export interface RulePrompt extends Prompt< - PromptKind.Rule, - RuleYAMLFrontMatter, - FilePathKind.Relative, - RelativePath, - string -> { - readonly type: PromptKind.Rule - readonly prefix: string - readonly ruleName: string - readonly globs: readonly string[] - readonly scope: RuleScope - readonly seriName?: SeriName - readonly rawMdxContent?: string -} - -/** - * Command prompt - */ -export interface CommandPrompt extends Prompt< - PromptKind.Command, - CommandYAMLFrontMatter, - FilePathKind.Relative, - RelativePath, - string -> { - readonly type: PromptKind.Command - readonly globalOnly?: true - readonly commandPrefix?: string - readonly commandName: string - readonly seriName?: SeriName - readonly rawMdxContent?: string -} - -/** - * Sub-agent prompt - */ -export interface SubAgentPrompt extends Prompt< - PromptKind.SubAgent, - SubAgentYAMLFrontMatter, - FilePathKind.Relative, - RelativePath, - string -> { - readonly type: PromptKind.SubAgent - readonly agentPrefix?: string - readonly agentName: string - readonly canonicalName: string - readonly seriName?: SeriName - readonly rawMdxContent?: string -} - -/** - * Skill child document (.md files in skill directory or any subdirectory) - * Excludes skill.md which is the main skill file - */ -export interface SkillChildDoc extends Prompt { - readonly type: PromptKind.SkillChildDoc - readonly relativePath: string -} - -/** - * Resource content encoding type - */ -export type SkillResourceEncoding = 'text' | 'base64' - -/** - * Skill resource file for AI on-demand access - * Any non-.md file in skill directory or subdirectories - * - * Supports: - * - Code files: .kt, .java, .py, .ts, .js, .go, .rs, .c, .cpp, etc. - * - Data files: .sql, .json, .xml, .yaml, .csv, etc. - * - Documents: .txt, .rtf, .docx, .pdf, etc. - * - Config files: .ini, .conf, .properties, etc. - * - Scripts: .sh, .bash, .ps1, .bat, etc. - * - Images: .png, .jpg, .gif, .svg, .webp, etc. - * - Binary files: .exe, .dll, .wasm, etc. - */ -export interface SkillResource { - readonly type: PromptKind.SkillResource - readonly extension: string - readonly fileName: string - readonly relativePath: string - readonly sourcePath?: string - readonly content: string - readonly encoding: SkillResourceEncoding - readonly length: number - readonly mimeType?: string -} - -/** - * MCP server configuration entry - */ -export interface McpServerConfig { - readonly command: string - readonly args?: readonly string[] - readonly env?: Readonly> - readonly disabled?: boolean - readonly autoApprove?: readonly string[] -} - -/** - * Skill MCP configuration (mcp.json) - * - Kiro: supports per-power MCP configuration natively - * - Others: may support lazy loading in the future - */ -export interface SkillMcpConfig { - readonly type: PromptKind.SkillMcpConfig - readonly mcpServers: Readonly> - readonly rawContent: string -} - -export interface SkillPrompt extends Prompt< - PromptKind.Skill, - SkillYAMLFrontMatter -> { - readonly type: PromptKind.Skill - readonly dir: RelativePath - readonly skillName: string - readonly yamlFrontMatter: SkillYAMLFrontMatter - readonly mcpConfig?: SkillMcpConfig - readonly childDocs?: SkillChildDoc[] - readonly resources?: SkillResource[] - readonly seriName?: SeriName -} - -/** - * Readme-family source file kind - * - * - Readme: rdm.mdx → README.md - * - CodeOfConduct: coc.mdx → CODE_OF_CONDUCT.md - * - Security: security.mdx → SECURITY.md - */ -export type ReadmeFileKind = 'Readme' | 'CodeOfConduct' | 'Security' - -/** - * Mapping from ReadmeFileKind to source/output file names - */ -export const README_FILE_KIND_MAP: Readonly< - Record -> = { - Readme: {src: 'rdm.mdx', out: 'README.md'}, - CodeOfConduct: {src: 'coc.mdx', out: 'CODE_OF_CONDUCT.md'}, - Security: {src: 'security.mdx', out: 'SECURITY.md'} -} - -/** - * README-family prompt data structure (README.md, CODE_OF_CONDUCT.md, SECURITY.md) - */ -export interface ReadmePrompt extends Prompt { - readonly type: PromptKind.Readme - readonly projectName: string - readonly targetDir: RelativePath - readonly isRoot: boolean - readonly fileKind: ReadmeFileKind -} - -/** - * Supported locale codes - */ -export type Locale = 'zh' | 'en' - -export type LocalizedFileExtension = string | readonly string[] - -/** - * Localized content wrapper for a single locale - * Contains both compiled content and raw MDX source - */ -export interface LocalizedContent { - /** Compiled/processed content */ - readonly content: string - - /** Original MDX source (before compilation) */ - readonly rawMdx?: string - - /** Extracted front matter */ - readonly frontMatter?: Record - - /** File last modified timestamp */ - readonly lastModified: Date - - /** Full prompt object (optional, for extended access) */ - readonly prompt?: T - - /** Absolute file path */ - readonly filePath: string -} - -/** - * Source content container for all locales - */ -export interface LocalizedSource { - /** Default source content (.src.mdx) */ - readonly zh?: LocalizedContent - - /** English content (.mdx) */ - readonly en?: LocalizedContent - - /** Default locale content (typically zh) */ - readonly default: LocalizedContent - - /** Which locale is the default */ - readonly defaultLocale: Locale -} - -/** Universal localized prompt wrapper */ -export interface LocalizedPrompt< - T extends Prompt = Prompt, - K extends PromptKind = PromptKind -> { - readonly name: string // Prompt identifier name - readonly type: K // Prompt type kind - readonly src?: LocalizedSource // Source files content (src directory, optional when dist-only) - readonly dist?: LocalizedContent // Compiled/dist content (dist directory, optional) - - /** Metadata flags */ - readonly metadata: { - readonly hasDist: boolean // Whether dist content exists - readonly hasMultipleLocales: boolean // Whether multiple locales exist in src - readonly isDirectoryStructure: boolean // Whether this is a directory-based prompt (like skills) - - /** Available child items (for directory structures) */ - readonly children?: string[] - } - - /** File paths for all variants */ - readonly paths: { - readonly zh?: string - readonly en?: string - readonly dist?: string - } -} - -/** - * Options for reading localized prompts from different structures - */ -export interface LocalizedReadOptions { - /** File extensions for each locale */ - readonly localeExtensions: { - readonly zh: LocalizedFileExtension - readonly en: LocalizedFileExtension - } - - /** Entry file name (without extension, e.g., 'skill' for skills) */ - readonly entryFileName?: string - - /** Whether source contents should be hydrated and compiled in addition to dist */ - readonly hydrateSourceContents?: boolean - - /** Create prompt from content */ - readonly createPrompt: ( - content: string, - locale: Locale, - name: string, - metadata?: Record - ) => T | Promise - - /** Prompt kind */ - readonly kind: K - - /** Whether this is a directory-based structure */ - readonly isDirectoryStructure: boolean -} - -/** - * Result of reading a directory structure (like skills) - */ -export interface DirectoryReadResult { - readonly prompts: LocalizedPrompt[] - readonly errors: ReadError[] -} - -/** - * Error during reading - */ -export interface ReadError { - readonly path: string - readonly error: Error - readonly phase: 'scan' | 'read' | 'compile' -} diff --git a/cli/src/plugins/plugin-core/LocalizedPromptReader.ts b/cli/src/plugins/plugin-core/LocalizedPromptReader.ts deleted file mode 100644 index 6765aa08..00000000 --- a/cli/src/plugins/plugin-core/LocalizedPromptReader.ts +++ /dev/null @@ -1,736 +0,0 @@ -import type {MdxGlobalScope} from '@truenine/md-compiler/globals' -import type {PromptCompilerDiagnosticContext} from './PromptCompilerDiagnostics' -import type { - DirectoryReadResult, - ILogger, - Locale, - LocalizedContent, - LocalizedFileExtension, - LocalizedPrompt, - LocalizedReadOptions, - LoggerDiagnosticInput, - Prompt, - PromptKind, - ReadError -} from './types' -import { - buildDiagnostic, - buildFileOperationDiagnostic, - buildPromptCompilerDiagnostic, - diagnosticLines -} from '@/diagnostics' -import { - assertNoResidualModuleSyntax, - MissingCompiledPromptError, - ResidualModuleSyntaxError -} from './DistPromptGuards' -import {readPromptArtifact} from './PromptArtifactCache' -import { - formatPromptCompilerDiagnostic, - resolveSourcePathForDistFile -} from './PromptCompilerDiagnostics' - -function shouldFailFast(error: unknown): boolean { - return error instanceof MissingCompiledPromptError || error instanceof ResidualModuleSyntaxError -} - -interface ReaderDiagnosticContext { - readonly promptKind: string - readonly logicalName: string - readonly entryDistPath: string - readonly srcPath?: string -} - -/** - * Universal reader for localized prompts - * Handles reading src (multiple locales) and dist (compiled) content - * Supports directory structures (skills) and flat files (commands, subAgents) - * - * Dist is the only prompt source that may flow into final outputs. - * Source files are read only for discovery, locale metadata, and validation. - */ -export class LocalizedPromptReader { - constructor( - private fs: typeof import('node:fs'), - private path: typeof import('node:path'), - private logger: ILogger, - private globalScope?: MdxGlobalScope - ) {} - - async readDirectoryStructure< - T extends Prompt, - K extends PromptKind - >( - srcDir: string, - distDir: string, - options: LocalizedReadOptions - ): Promise> { - const prompts: LocalizedPrompt[] = [] - const errors: ReadError[] = [] - - if (!this.exists(srcDir)) return {prompts, errors} - - try { - const entries = this.fs.readdirSync(srcDir, {withFileTypes: true}) - - for (const entry of entries) { - if (!entry.isDirectory()) continue - - const {name} = entry - const srcEntryDir = this.path.join(srcDir, name) - const distEntryDir = this.path.join(distDir, name) - - try { - const localized = await this.readEntry( - name, - srcEntryDir, - distEntryDir, - options, - true - ) - - if (localized) prompts.push(localized) - } catch (error) { - errors.push({ - path: srcEntryDir, - error: error as Error, - phase: 'read' - }) - this.logger.error(buildFileOperationDiagnostic({ - code: 'LOCALIZED_PROMPT_ENTRY_READ_FAILED', - title: 'Failed to read localized prompt entry', - operation: 'read', - targetKind: `${String(options.kind)} prompt entry`, - path: srcEntryDir, - error, - details: { - entryName: name, - promptKind: String(options.kind) - } - })) - if (shouldFailFast(error)) throw error - } - } - } catch (error) { - errors.push({ - path: srcDir, - error: error as Error, - phase: 'scan' - }) - this.logger.error(buildFileOperationDiagnostic({ - code: 'LOCALIZED_PROMPT_DIRECTORY_SCAN_FAILED', - title: 'Failed to scan localized prompt source directory', - operation: 'scan', - targetKind: `${String(options.kind)} prompt source directory`, - path: srcDir, - error, - details: { - promptKind: String(options.kind) - } - })) - if (shouldFailFast(error)) throw error - } - - return {prompts, errors} - } - - async readFlatFiles< - T extends Prompt, - K extends PromptKind - >( - srcDir: string, - distDir: string, - options: LocalizedReadOptions - ): Promise> { - const prompts: LocalizedPrompt[] = [] - const errors: ReadError[] = [] - - const srcExists = this.exists(srcDir) - const distExists = this.exists(distDir) - - this.logger.debug(`readFlatFiles: srcDir=${srcDir}, exists=${srcExists}`) - this.logger.debug(`readFlatFiles: distDir=${distDir}, exists=${distExists}`) - - if (!srcExists && !distExists) return {prompts, errors} - - const zhExtensions = this.normalizeExtensions(options.localeExtensions.zh) - const seenNames = new Set() - - const readPrompt = async (fullName: string, filePath: string): Promise => { - if (seenNames.has(fullName)) return - seenNames.add(fullName) - - try { - const localized = await this.readFlatEntry( - fullName, - srcDir, - distDir, - fullName, - options - ) - - if (localized) prompts.push(localized) - } catch (error) { - errors.push({ - path: filePath, - error: error as Error, - phase: 'read' - }) - this.logger.error(buildFileOperationDiagnostic({ - code: 'LOCALIZED_PROMPT_FILE_READ_FAILED', - title: 'Failed to read localized prompt file', - operation: 'read', - targetKind: `${String(options.kind)} prompt file`, - path: filePath, - error, - details: { - promptKind: String(options.kind), - logicalName: fullName - } - })) - if (shouldFailFast(error)) throw error - } - } - - const scanSourceDirectory = async (currentSrcDir: string, relativePath: string = ''): Promise => { - if (!this.exists(currentSrcDir)) return - - try { - const entries = this.fs.readdirSync(currentSrcDir, {withFileTypes: true}) - for (const entry of entries) { - const entryRelativePath = relativePath - ? this.path.join(relativePath, entry.name) - : entry.name - - if (entry.isDirectory()) { - await scanSourceDirectory(this.path.join(currentSrcDir, entry.name), entryRelativePath) - continue - } - - const matchedExtension = this.findMatchingExtension(entry.name, zhExtensions) - if (!entry.isFile() || matchedExtension == null) continue - - const baseName = entry.name.slice(0, -matchedExtension.length) - const fullName = relativePath - ? this.path.join(relativePath, baseName) - : baseName - - await readPrompt(fullName, this.path.join(currentSrcDir, entry.name)) - } - } catch (error) { - errors.push({ - path: currentSrcDir, - error: error as Error, - phase: 'scan' - }) - this.logger.error(buildFileOperationDiagnostic({ - code: 'LOCALIZED_SOURCE_DIRECTORY_SCAN_FAILED', - title: 'Failed to scan localized source directory', - operation: 'scan', - targetKind: `${String(options.kind)} source directory`, - path: currentSrcDir, - error, - details: { - promptKind: String(options.kind) - } - })) - if (shouldFailFast(error)) throw error - } - } - - const scanDistDirectory = async (currentDistDir: string, relativePath: string = ''): Promise => { - if (!this.exists(currentDistDir)) return - - try { - const entries = this.fs.readdirSync(currentDistDir, {withFileTypes: true}) - for (const entry of entries) { - const entryRelativePath = relativePath - ? this.path.join(relativePath, entry.name) - : entry.name - - if (entry.isDirectory()) { - await scanDistDirectory(this.path.join(currentDistDir, entry.name), entryRelativePath) - continue - } - - if (!entry.isFile() || !entry.name.endsWith('.mdx')) continue - - const baseName = entry.name.slice(0, -'.mdx'.length) - const fullName = relativePath - ? this.path.join(relativePath, baseName) - : baseName - - await readPrompt(fullName, this.path.join(currentDistDir, entry.name)) - } - } catch (error) { - errors.push({ - path: currentDistDir, - error: error as Error, - phase: 'scan' - }) - this.logger.error(buildFileOperationDiagnostic({ - code: 'LOCALIZED_DIST_DIRECTORY_SCAN_FAILED', - title: 'Failed to scan localized dist directory', - operation: 'scan', - targetKind: `${String(options.kind)} dist directory`, - path: currentDistDir, - error, - details: { - promptKind: String(options.kind) - } - })) - if (shouldFailFast(error)) throw error - } - } - - if (srcExists) await scanSourceDirectory(srcDir) - if (distExists) await scanDistDirectory(distDir) - - return {prompts, errors} - } - - async readSingleFile< - T extends Prompt, - K extends PromptKind - >( - srcBasePath: string, // Path without extension - distBasePath: string, - options: LocalizedReadOptions - ): Promise | null> { - const name = this.path.basename(srcBasePath) - - return this.readFlatEntry(name, this.path.dirname(srcBasePath), this.path.dirname(distBasePath), srcBasePath, options, true) - } - - private async readEntry< - T extends Prompt, - K extends PromptKind - >( - name: string, - srcEntryDir: string, - distEntryDir: string, - options: LocalizedReadOptions, - isDirectoryStructure = true - ): Promise | null> { - const {localeExtensions, entryFileName, createPrompt, kind} = options - const hydrateSourceContents = options.hydrateSourceContents ?? true - - const baseFileName = entryFileName ?? name - const zhExtensions = this.normalizeExtensions(localeExtensions.zh) - const enExtensions = this.normalizeExtensions(localeExtensions.en) - const srcZhPath = this.resolveLocalizedPath(srcEntryDir, baseFileName, zhExtensions) - const srcEnPath = this.resolveLocalizedPath(srcEntryDir, baseFileName, enExtensions) - const distPath = this.path.join(distEntryDir, `${baseFileName}.mdx`) - const hasSourceZh = this.exists(srcZhPath) - const hasSourceEn = this.exists(srcEnPath) - const existingSourcePath = hasSourceZh - ? srcZhPath - : hasSourceEn - ? srcEnPath - : void 0 - const diagnosticContext: ReaderDiagnosticContext = { - promptKind: String(kind), - logicalName: name, - entryDistPath: distPath, - ...existingSourcePath != null && {srcPath: existingSourcePath} - } - - const distContent = await this.readDistContent(distPath, createPrompt, name, diagnosticContext) - const zhContent = hasSourceZh && hydrateSourceContents - ? await this.readLocaleContent(srcZhPath, 'zh', createPrompt, name, String(kind)) - : null - const enContent = hasSourceEn && hydrateSourceContents - ? await this.readLocaleContent(srcEnPath, 'en', createPrompt, name, String(kind)) - : null - - const hasDist = distContent != null - const sourcePath = hasSourceZh ? srcZhPath : hasSourceEn ? srcEnPath : void 0 - - if (!hasDist && !hasSourceZh && !hasSourceEn) { - this.logger.warn(buildDiagnostic({ - code: 'LOCALIZED_PROMPT_ARTIFACTS_MISSING', - title: `Missing source and dist prompt artifacts for ${name}`, - rootCause: diagnosticLines( - `tnmsc could not find either the source prompt or the compiled dist prompt for "${name}".` - ), - exactFix: diagnosticLines( - 'Create the source prompt and rebuild the compiled dist prompt before retrying tnmsc.' - ), - details: { - promptKind: String(kind), - name, - srcZhPath, - srcEnPath, - distPath - } - })) - return null - } - - if (!hasDist) { - throw new MissingCompiledPromptError({ - kind: String(kind), - name, - ...sourcePath != null && {sourcePath}, - expectedDistPath: distPath - }) - } - - const src: LocalizedPrompt['src'] = hydrateSourceContents && zhContent != null - ? { - zh: zhContent, - ...enContent != null && {en: enContent}, - default: zhContent, - defaultLocale: 'zh' - } - : void 0 - - const children = isDirectoryStructure - ? this.scanChildren(distEntryDir, baseFileName, ['.mdx']) - : void 0 - - return { - name, - type: kind, - ...src != null && {src}, - ...hasDist && {dist: distContent}, - metadata: { - hasDist, - hasMultipleLocales: hasSourceEn, - isDirectoryStructure, - ...children && children.length > 0 && {children} - }, - paths: { - ...hasSourceZh && {zh: srcZhPath}, - ...hasSourceEn && {en: srcEnPath}, - ...hasDist && {dist: distPath} - } - } - } - - private async readFlatEntry< - T extends Prompt, - K extends PromptKind - >( - name: string, - srcDir: string, - distDir: string, - baseName: string, - options: LocalizedReadOptions, - isSingleFile = false - ): Promise | null> { - const {localeExtensions, createPrompt, kind} = options - const hydrateSourceContents = options.hydrateSourceContents ?? true - - const zhExtensions = this.normalizeExtensions(localeExtensions.zh) - const enExtensions = this.normalizeExtensions(localeExtensions.en) - const srcZhPath = this.resolveLocalizedPath('', baseName, zhExtensions) - const srcEnPath = this.resolveLocalizedPath('', baseName, enExtensions) - const distPath = this.path.join(distDir, `${name}.mdx`) - - const fullSrcZhPath = isSingleFile ? srcZhPath : this.path.join(srcDir, srcZhPath) - const fullSrcEnPath = isSingleFile ? srcEnPath : this.path.join(srcDir, srcEnPath) - const hasSourceZh = this.exists(fullSrcZhPath) - const hasSourceEn = this.exists(fullSrcEnPath) - const existingSourcePath = hasSourceZh - ? fullSrcZhPath - : hasSourceEn - ? fullSrcEnPath - : void 0 - const diagnosticContext: ReaderDiagnosticContext = { - promptKind: String(kind), - logicalName: name, - entryDistPath: distPath, - ...existingSourcePath != null && {srcPath: existingSourcePath} - } - - const distContent = await this.readDistContent(distPath, createPrompt, name, diagnosticContext) - const zhContent = hasSourceZh && hydrateSourceContents - ? await this.readLocaleContent(fullSrcZhPath, 'zh', createPrompt, name, String(kind)) - : null - const enContent = hasSourceEn && hydrateSourceContents - ? await this.readLocaleContent(fullSrcEnPath, 'en', createPrompt, name, String(kind)) - : null - - const hasDist = distContent != null - const sourcePath = hasSourceZh ? fullSrcZhPath : hasSourceEn ? fullSrcEnPath : void 0 - - if (!hasDist && !hasSourceZh && !hasSourceEn) { - this.logger.warn(buildDiagnostic({ - code: 'LOCALIZED_PROMPT_ARTIFACTS_MISSING', - title: `Missing source and dist prompt artifacts for ${name}`, - rootCause: diagnosticLines( - `tnmsc could not find either the source prompt or the compiled dist prompt for "${name}".` - ), - exactFix: diagnosticLines( - 'Create the source prompt and rebuild the compiled dist prompt before retrying tnmsc.' - ), - details: { - promptKind: String(kind), - name, - srcZhPath: fullSrcZhPath, - srcEnPath: fullSrcEnPath, - distPath - } - })) - return null - } - - if (!hasDist) { - throw new MissingCompiledPromptError({ - kind: String(kind), - name, - ...sourcePath != null && {sourcePath}, - expectedDistPath: distPath - }) - } - - const src: LocalizedPrompt['src'] = hydrateSourceContents && zhContent != null - ? { - zh: zhContent, - ...enContent != null && {en: enContent}, - default: zhContent, - defaultLocale: 'zh' - } - : void 0 - - return { - name, - type: kind, - ...src != null && {src}, - ...hasDist && {dist: distContent}, - metadata: { - hasDist, - hasMultipleLocales: hasSourceEn, - isDirectoryStructure: false - }, - paths: { - ...hasSourceZh && {zh: fullSrcZhPath}, - ...hasSourceEn && {en: fullSrcEnPath}, - ...hasDist && {dist: distPath} - } - } - } - - private async readLocaleContent( - filePath: string, - locale: Locale, - createPrompt: (content: string, locale: Locale, name: string, metadata?: Record) => T | Promise, - name: string, - promptKind: string - ): Promise | null> { - if (!this.exists(filePath)) return null - - try { - const artifact = await readPromptArtifact(filePath, { - mode: 'source', - globalScope: this.globalScope - }) - assertNoResidualModuleSyntax(artifact.content, filePath) - - const prompt = await createPrompt(artifact.content, locale, name, artifact.metadata) - - const result: LocalizedContent = { - content: artifact.content, - lastModified: artifact.lastModified, - filePath - } - - if (artifact.rawMdx.length > 0) { - Object.assign(result, {rawMdx: artifact.rawMdx}) - } - if (artifact.parsed.yamlFrontMatter != null) Object.assign(result, {frontMatter: artifact.parsed.yamlFrontMatter}) - if (prompt != null) Object.assign(result, {prompt}) - - return result - } catch (error) { - this.logger.error(buildPromptCompilerDiagnostic({ - code: 'LOCALIZED_SOURCE_PROMPT_READ_FAILED', - title: 'Failed to read localized source prompt', - diagnosticText: formatPromptCompilerDiagnostic(error, { - operation: 'Failed to read localized source prompt.', - promptKind, - logicalName: name, - distPath: filePath - }), - details: { - promptKind, - locale, - filePath - } - })) - throw error - } - } - - private async readDistContent( - filePath: string, - createPrompt: (content: string, locale: Locale, name: string, metadata?: Record) => T | Promise, - name: string, - diagnosticContext: ReaderDiagnosticContext - ): Promise | null> { - if (!this.exists(filePath)) return null - - try { - const artifact = await readPromptArtifact(filePath, { - mode: 'dist', - globalScope: this.globalScope - }) - assertNoResidualModuleSyntax(artifact.content, filePath) - - const prompt = await createPrompt( - artifact.content, - 'zh', - name, - artifact.metadata - ) - - const result: LocalizedContent = { - content: artifact.content, - lastModified: artifact.lastModified, - prompt, - filePath, - rawMdx: artifact.rawMdx - } - - if (artifact.parsed.yamlFrontMatter != null) Object.assign(result, {frontMatter: artifact.parsed.yamlFrontMatter}) - return result - } catch (error) { - this.logger.error(this.buildDistReadDiagnostic(error, filePath, diagnosticContext)) - throw error - } - } - - private buildDistReadDiagnostic( - error: unknown, - filePath: string, - context: ReaderDiagnosticContext - ): LoggerDiagnosticInput { - const mappedSourcePath = resolveSourcePathForDistFile(this.path, filePath, { - preferredSourcePath: filePath === context.entryDistPath ? context.srcPath : void 0, - distRootDir: this.path.dirname(context.entryDistPath), - srcRootDir: context.srcPath != null ? this.path.dirname(context.srcPath) : void 0 - }) - const formattedContext: PromptCompilerDiagnosticContext = { - operation: 'Failed to read dist content.', - promptKind: context.promptKind, - logicalName: context.logicalName, - entryDistPath: context.entryDistPath, - distPath: filePath, - srcPath: mappedSourcePath - } - return buildPromptCompilerDiagnostic({ - code: 'LOCALIZED_DIST_PROMPT_READ_FAILED', - title: 'Failed to read localized dist prompt', - diagnosticText: formatPromptCompilerDiagnostic(error, formattedContext), - details: { - promptKind: context.promptKind, - logicalName: context.logicalName, - filePath, - srcPath: mappedSourcePath - } - }) - } - - private scanChildren( - dir: string, - entryFileName: string, - zhExtensions: readonly string[] - ): string[] { - const children: string[] = [] - - if (!this.exists(dir)) return children - - const entryFullNames = new Set(zhExtensions.map(extension => `${entryFileName}${extension}`)) - - try { - const scanDir = (currentDir: string, relativePath: string): void => { - const entries = this.fs.readdirSync(currentDir, {withFileTypes: true}) - - for (const entry of entries) { - const fullPath = this.path.join(currentDir, entry.name) - const relativeFullPath = relativePath - ? this.path.join(relativePath, entry.name) - : entry.name - - if (entry.isDirectory()) scanDir(fullPath, relativeFullPath) - else { - const matchedExtension = this.findMatchingExtension(entry.name, zhExtensions) - if (matchedExtension == null || entryFullNames.has(entry.name)) continue - - const nameWithoutExt = entry.name.slice(0, -matchedExtension.length) // Child doc: relative path without extension - const relativeDir = this.path.dirname(relativeFullPath) - const childPath = relativeDir === '.' - ? nameWithoutExt - : this.path.join(relativeDir, nameWithoutExt) - children.push(childPath) - } - } - } - - scanDir(dir, '') - } catch (error) { - this.logger.warn(buildFileOperationDiagnostic({ - code: 'LOCALIZED_PROMPT_CHILD_SCAN_FAILED', - title: 'Failed to scan localized prompt child documents', - operation: 'scan', - targetKind: 'localized prompt child directory', - path: dir, - error - })) - } - - return children - } - - private exists(path: string): boolean { - try { - return this.fs.existsSync(path) - } catch { - return false - } - } - - private normalizeExtensions(extension: LocalizedFileExtension): readonly string[] { - return typeof extension === 'string' - ? [extension] - : extension - } - - private findMatchingExtension(fileName: string, extensions: readonly string[]): string | undefined { - return extensions.find(extension => fileName.endsWith(extension)) - } - - private resolveLocalizedPath(dir: string, baseFileName: string, extensions: readonly string[]): string { - const defaultPath = dir === '' - ? `${baseFileName}${extensions[0]}` - : this.path.join(dir, `${baseFileName}${extensions[0]}`) - - for (const extension of extensions) { - const candidate = dir === '' - ? `${baseFileName}${extension}` - : this.path.join(dir, `${baseFileName}${extension}`) - if (this.exists(candidate)) return candidate - } - - return defaultPath - } -} - -/** - * Factory function to create a LocalizedPromptReader - */ -export function createLocalizedPromptReader( - fs: typeof import('node:fs'), - path: typeof import('node:path'), - logger: ILogger, - globalScope?: MdxGlobalScope -): LocalizedPromptReader { - return new LocalizedPromptReader(fs, path, logger, globalScope) -} - -export { - type DirectoryReadResult, - type LocalizedReadOptions, - type ReadError -} from './types' diff --git a/cli/src/plugins/plugin-core/McpConfigManager.ts b/cli/src/plugins/plugin-core/McpConfigManager.ts deleted file mode 100644 index 6ed7e68c..00000000 --- a/cli/src/plugins/plugin-core/McpConfigManager.ts +++ /dev/null @@ -1,251 +0,0 @@ -import type {ILogger, McpServerConfig, SkillPrompt} from './types' -import * as path from 'node:path' -import {buildFileOperationDiagnostic} from '@/diagnostics' -import {resolveSkillName} from './PromptIdentity' - -/** - * MCP configuration format type - */ -export type McpConfigFormat = 'cursor' | 'opencode' - -/** - * MCP config entry for a single server - */ -export interface McpServerEntry { - readonly name: string - readonly config: McpServerConfig -} - -/** - * Transformed MCP server config for different output formats - */ -export interface TransformedMcpConfig { - [serverName: string]: Record -} - -/** - * Result of MCP config write operation - */ -export interface McpWriteResult { - readonly success: boolean - readonly path: string - readonly serverCount: number - readonly error?: Error - readonly skipped?: boolean -} - -/** - * MCP configuration transformer function type - */ -export type McpConfigTransformer = (config: McpServerConfig) => Record - -export function collectMcpServersFromSkills(skills: readonly SkillPrompt[], logger?: ILogger): Map { - const merged = new Map() - const serverCountsBySkill = new Map() - - for (const skill of skills) { - if (skill.mcpConfig == null) continue - - const skillName = resolveSkillName(skill) - let count = 0 - for (const [name, config] of Object.entries(skill.mcpConfig.mcpServers)) { - merged.set(name, config) - count++ - } - if (count > 0) { - serverCountsBySkill.set(skillName, count) - } - } - - // Emit aggregated summary log instead of per-item logs - if (serverCountsBySkill.size > 0 && logger == null) return merged - - const totalServers = [...serverCountsBySkill.values()].reduce((a, b) => a + b, 0) - logger?.debug('mcp servers collected', { - totalSkills: serverCountsBySkill.size, - totalServers, - bySkill: Object.fromEntries(serverCountsBySkill) - }) - return merged -} - -export function transformMcpServerMap(servers: Map, transformer: McpConfigTransformer): TransformedMcpConfig { - const result: TransformedMcpConfig = {} - - for (const [name, config] of servers) result[name] = transformer(config) - - return result -} - -/** - * MCP Config Manager - * Handles merging and writing MCP configurations from skills to various output formats - */ -export class McpConfigManager { - private readonly fs: typeof import('node:fs') - private readonly logger: ILogger - - constructor(deps: {fs: typeof import('node:fs'), logger: ILogger}) { - this.fs = deps.fs - this.logger = deps.logger - } - - collectMcpServers(skills: readonly SkillPrompt[]): Map { - return collectMcpServersFromSkills(skills, this.logger) - } - - transformMcpServers(servers: Map, transformer: McpConfigTransformer): TransformedMcpConfig { - return transformMcpServerMap(servers, transformer) - } - - readExistingConfig(configPath: string): Record { - try { - if (this.fs.existsSync(configPath)) { - const content = this.fs.readFileSync(configPath, 'utf8') - return JSON.parse(content) as Record - } - } catch (error) { - this.logger.warn( - buildFileOperationDiagnostic({ - code: 'MCP_CONFIG_READ_FAILED', - title: 'Failed to read existing MCP config', - operation: 'read', - targetKind: 'MCP config file', - path: configPath, - error, - details: { - fallback: 'starting fresh' - } - }) - ) - } - return {} - } - - writeCursorMcpConfig(configPath: string, servers: TransformedMcpConfig, dryRun: boolean): McpWriteResult { - const existingConfig = this.readExistingConfig(configPath) - const existingMcpServers = (existingConfig['mcpServers'] as Record) ?? {} - - existingConfig['mcpServers'] = {...existingMcpServers, ...servers} - const content = JSON.stringify(existingConfig, null, 2) - - return this.writeConfigFile(configPath, content, Object.keys(servers).length, dryRun) - } - - writeOpencodeMcpConfig(configPath: string, servers: TransformedMcpConfig, dryRun: boolean, additionalConfig?: Record): McpWriteResult { - const existingConfig = this.readExistingConfig(configPath) - - const mergedConfig = { - // Merge with additional config (like $schema, plugin array) - ...existingConfig, - ...additionalConfig, - mcp: servers - } - - const content = JSON.stringify(mergedConfig, null, 2) - return this.writeConfigFile(configPath, content, Object.keys(servers).length, dryRun) - } - - writeSkillMcpConfig(configPath: string, rawContent: string, dryRun: boolean): McpWriteResult { - return this.writeConfigFile(configPath, rawContent, 1, dryRun) - } - - private ensureDirectory(dir: string): void { - if (!this.fs.existsSync(dir)) this.fs.mkdirSync(dir, {recursive: true}) - } - - private writeConfigFile(configPath: string, content: string, serverCount: number, dryRun: boolean): McpWriteResult { - if (dryRun) { - this.logger.trace({ - action: 'dryRun', - type: 'mcpConfig', - path: configPath, - serverCount - }) - return {success: true, path: configPath, serverCount, skipped: true} - } - - try { - this.ensureDirectory(path.dirname(configPath)) - this.fs.writeFileSync(configPath, content) - this.logger.trace({ - action: 'write', - type: 'mcpConfig', - path: configPath, - serverCount - }) - return {success: true, path: configPath, serverCount} - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.logger.error( - buildFileOperationDiagnostic({ - code: 'MCP_CONFIG_WRITE_FAILED', - title: 'Failed to write MCP config', - operation: 'write', - targetKind: 'MCP config file', - path: configPath, - error: errMsg - }) - ) - return { - success: false, - path: configPath, - serverCount: 0, - error: error as Error - } - } - } -} - -/** - * Transform MCP config for Cursor format - * Keeps standard MCP structure with command/args/env or url/headers - */ -export function transformMcpConfigForCursor(config: McpServerConfig): Record { - const result: Record = {} - - if (config.command != null) { - result['command'] = config.command - if (config.args != null) result['args'] = config.args - if (config.env != null) result['env'] = config.env - return result - } - - const configRecord = config as unknown as Record - const url = configRecord['url'] ?? configRecord['serverUrl'] - - if (url == null) return result - - result['url'] = url - const {headers} = configRecord - if (headers != null) result['headers'] = headers - - return result -} - -/** - * Transform MCP config for Opencode format - * Converts to local (command array) or remote (url) format with enabled flag - */ -export function transformMcpConfigForOpencode(config: McpServerConfig): Record { - const result: Record = {} - - if (config.command != null) { - result['type'] = 'local' - const commandArray = [config.command] - if (config.args != null) commandArray.push(...config.args) - result['command'] = commandArray - if (config.env != null) result['environment'] = config.env - } else { - result['type'] = 'remote' - const configRecord = config as unknown as Record - if (configRecord['url'] != null) result['url'] = configRecord['url'] - else if (configRecord['serverUrl'] != null) { - result['url'] = configRecord['serverUrl'] - } - } - - result['enabled'] = config.disabled !== true - - return result -} diff --git a/cli/src/plugins/plugin-core/OutputTypes.ts b/cli/src/plugins/plugin-core/OutputTypes.ts deleted file mode 100644 index 64f1c147..00000000 --- a/cli/src/plugins/plugin-core/OutputTypes.ts +++ /dev/null @@ -1,145 +0,0 @@ -import type {GlobalConfigDirectoryType} from './enums' -import type {SubAgentPrompt} from './InputTypes' -import type {AbsolutePath, RelativePath} from './PromptTypes' - -/** - * Global configuration based on user_home root directory - */ -export interface GlobalConfigDirectoryInUserHome { - readonly type: K - readonly directory: RelativePath -} - -/** - * Special, absolute path global memory prompt - */ -export interface GlobalConfigDirectoryInOther { - readonly type: K - readonly directory: AbsolutePath -} - -export type GlobalConfigDirectory = GlobalConfigDirectoryInUserHome | GlobalConfigDirectoryInOther - -export interface Target { - -} - -/** - * SubAgent frontmatter field mapping - * Value can be a static string or a function that extracts value from SubAgentPrompt - */ -export type SubAgentFrontMatterField = string | ((subAgent: SubAgentPrompt) => unknown) - -/** - * SubAgent output configuration for declarative configuration - */ -export interface SubAgentOutputConfig { - /** Output subdirectory name (relative to IDE config directory) */ - readonly subDir?: string - - /** File name format template */ - readonly fileNameTemplate?: 'prefix-agent' | 'prefix_agent' | 'agent' | string - - /** Whether to include series prefix */ - readonly includeSeriesPrefix?: boolean - - /** Series prefix separator */ - readonly seriesSeparator?: string - - /** Frontmatter configuration */ - readonly frontMatter?: { - /** Custom field mappings */ - readonly fields?: Record - /** Fields to exclude */ - readonly exclude?: string[] - } - - /** Content transformation options */ - readonly contentTransform?: { - /** Whether to transform MDX references to Markdown */ - readonly transformMdxRefs?: boolean - /** Custom content processor */ - readonly processor?: (content: string, subAgent: SubAgentPrompt) => string - } -} - -/** - * Generic registry data structure. - * All registry files must have version and lastUpdated fields. - */ -export interface RegistryData { - readonly version: string - readonly lastUpdated: string -} - -/** - * Result of a registry operation. - */ -export interface RegistryOperationResult { - readonly success: boolean - readonly entryName: string - readonly error?: Error -} - -/** - * Source information for a Kiro power. - * Indicates the origin type of a registered power. - */ -export interface KiroPowerSource { - readonly type: 'local' | 'repo' | 'registry' - readonly repoId?: string - readonly repoName?: string - readonly cloneId?: string -} - -/** - * A single power entry in the Kiro registry. - * Contains metadata about an installed power. - */ -export interface KiroPowerEntry { - readonly name: string - readonly description: string - readonly mcpServers?: readonly string[] - readonly author?: string - readonly keywords: readonly string[] - readonly displayName?: string - readonly installed: boolean - readonly installedAt?: string - readonly installPath?: string - readonly source: KiroPowerSource - readonly sourcePath?: string -} - -/** - * Repository source tracking in Kiro registry. - * Tracks the source/origin of registered items. - */ -export interface KiroRepoSource { - readonly name: string - readonly type: 'local' | 'git' - readonly enabled: boolean - readonly addedAt?: string - readonly powerCount: number - readonly path?: string - readonly lastSync?: string - readonly powers?: readonly string[] -} - -/** - * Kiro recommended repo metadata (preserved during updates). - */ -export interface KiroRecommendedRepo { - readonly url: string - readonly lastFetch: string - readonly powerCount: number -} - -/** - * Complete Kiro powers registry structure. - * Represents the full ~/.kiro/powers/registry.json file. - */ -export interface KiroPowersRegistry extends RegistryData { - readonly powers: Record - readonly repoSources: Record - readonly kiroRecommendedRepo?: KiroRecommendedRepo -} diff --git a/cli/src/plugins/plugin-core/PromptArtifactCache.test.ts b/cli/src/plugins/plugin-core/PromptArtifactCache.test.ts deleted file mode 100644 index 9708baf5..00000000 --- a/cli/src/plugins/plugin-core/PromptArtifactCache.test.ts +++ /dev/null @@ -1,203 +0,0 @@ -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import {afterEach, describe, expect, it, vi} from 'vitest' - -import { - clearPromptArtifactCache, - compileRawPromptArtifact, - readPromptArtifact -} from './PromptArtifactCache' - -const {mdxToMdMock, parseMarkdownMock} = vi.hoisted(() => ({ - mdxToMdMock: vi.fn(async (content: string) => ({ - content: `compiled:${content.trim()}`, - metadata: { - fields: { - compiled: true - } - } - })), - parseMarkdownMock: vi.fn((content: string) => { - const frontMatterMatch = /^---\n([\s\S]*?)\n---\n?([\s\S]*)$/u.exec(content) - - if (frontMatterMatch != null) { - const rawFrontMatter = `---\n${frontMatterMatch[1]}\n---` - const markdownContent = frontMatterMatch[2].trim() - - return { - yamlFrontMatter: { - title: 'frontmatter' - }, - rawFrontMatter, - contentWithoutFrontMatter: markdownContent, - markdownAst: { - type: 'root' - }, - markdownContents: [markdownContent] - } - } - - const trimmed = content.trim() - return { - yamlFrontMatter: void 0, - rawFrontMatter: void 0, - contentWithoutFrontMatter: trimmed, - markdownAst: { - type: 'root' - }, - markdownContents: [trimmed] - } - }) -})) - -vi.mock('@truenine/md-compiler', () => ({ - mdxToMd: mdxToMdMock -})) - -vi.mock('@truenine/md-compiler/markdown', () => ({ - parseMarkdown: parseMarkdownMock -})) - -afterEach(() => { - clearPromptArtifactCache() - vi.clearAllMocks() -}) - -describe('prompt artifact cache', () => { - it('caches repeated source prompt compilation by file mtime', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-prompt-cache-source-')) - const filePath = path.join(tempDir, 'prompt.src.mdx') - - try { - fs.writeFileSync(filePath, 'Hello prompt', 'utf8') - - const first = await readPromptArtifact(filePath, { - mode: 'source' - }) - const second = await readPromptArtifact(filePath, { - mode: 'source' - }) - - expect(first.content).toBe('compiled:Hello prompt') - expect(second.content).toBe('compiled:Hello prompt') - expect(mdxToMdMock).toHaveBeenCalledTimes(1) - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('reads export-default dist artifacts without recompiling', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-prompt-cache-dist-')) - const filePath = path.join(tempDir, 'prompt.mdx') - - try { - fs.writeFileSync(filePath, [ - 'export default {', - ' description: \'dist description\',', - ' version: \'1.0.0\'', - '}', - '', - 'Compiled body', - '' - ].join('\n'), 'utf8') - - const artifact = await readPromptArtifact(filePath, { - mode: 'dist' - }) - - expect(artifact.content).toBe('Compiled body') - expect(artifact.metadata).toEqual({ - description: 'dist description', - version: '1.0.0' - }) - expect(mdxToMdMock).not.toHaveBeenCalled() - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('still compiles frontmatter dist artifacts so MDX body syntax is resolved', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-prompt-cache-frontmatter-dist-')) - const filePath = path.join(tempDir, 'prompt.mdx') - - try { - fs.writeFileSync(filePath, [ - '---', - 'title: demo', - '---', - '', - 'Hello {profile.name}', - '' - ].join('\n'), 'utf8') - - const artifact = await readPromptArtifact(filePath, { - mode: 'dist' - }) - - expect(artifact.content).toContain('compiled:') - expect(artifact.metadata).toEqual({ - compiled: true - }) - expect(mdxToMdMock).toHaveBeenCalledTimes(1) - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('falls back to mdx compilation when export-default metadata is not JSON5-compatible', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-prompt-cache-dist-fallback-')) - const filePath = path.join(tempDir, 'prompt.mdx') - - try { - fs.writeFileSync(filePath, [ - 'export default {', - ' description: `template literal metadata`,', - '}', - '', - 'Compiled body', - '' - ].join('\n'), 'utf8') - - const artifact = await readPromptArtifact(filePath, { - mode: 'dist' - }) - - expect(artifact.content).toContain('compiled:export default') - expect(mdxToMdMock).toHaveBeenCalledTimes(1) - } - finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('caches raw prompt recompilation for identical tool preset inputs', async () => { - const resultA = await compileRawPromptArtifact({ - filePath: '/tmp/command.mdx', - rawMdx: 'Tool preset body', - cacheMtimeMs: 42, - globalScope: { - tool: { - preset: 'demo' - } - } as never - }) - const resultB = await compileRawPromptArtifact({ - filePath: '/tmp/command.mdx', - rawMdx: 'Tool preset body', - cacheMtimeMs: 42, - globalScope: { - tool: { - preset: 'demo' - } - } as never - }) - - expect(resultA.content).toBe('compiled:Tool preset body') - expect(resultB.content).toBe('compiled:Tool preset body') - expect(mdxToMdMock).toHaveBeenCalledTimes(1) - }) -}) diff --git a/cli/src/plugins/plugin-core/PromptArtifactCache.ts b/cli/src/plugins/plugin-core/PromptArtifactCache.ts deleted file mode 100644 index 2ad98dfe..00000000 --- a/cli/src/plugins/plugin-core/PromptArtifactCache.ts +++ /dev/null @@ -1,317 +0,0 @@ -import type {MdxGlobalScope} from '@truenine/md-compiler/globals' -import type {ParsedMarkdown} from '@truenine/md-compiler/markdown' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {mdxToMd} from '@truenine/md-compiler' -import {parseMarkdown} from '@truenine/md-compiler/markdown' -import JSON5 from 'json5' - -export interface PromptArtifact { - readonly rawMdx: string - readonly parsed: ParsedMarkdown - readonly content: string - readonly metadata: Record - readonly lastModified: Date -} - -export interface ReadPromptArtifactOptions { - readonly mode: 'source' | 'dist' - readonly globalScope?: MdxGlobalScope | undefined - readonly rawMdx?: string | undefined - readonly lastModified?: Date | undefined -} - -export interface CompileRawPromptArtifactOptions { - readonly filePath: string - readonly globalScope?: MdxGlobalScope | undefined - readonly rawMdx: string - readonly cacheMtimeMs?: number | undefined -} - -export interface RawPromptCompilation { - readonly content: string - readonly metadata: Record -} - -interface CachedPromptArtifactValue { - readonly artifact: PromptArtifact - readonly stamp: number -} - -const promptArtifactCache = new Map>() -const rawPromptCompilationCache = new Map>() -const EXPORT_DEFAULT_PREFIX_PATTERN = /^export\s+default\s*/u - -function normalizeForCache(value: unknown): unknown { - if (value == null || typeof value !== 'object') { - return value - } - - if (Array.isArray(value)) { - return value.map(normalizeForCache) - } - - const normalizedEntries = Object.entries(value as Record) - .sort(([leftKey], [rightKey]) => leftKey.localeCompare(rightKey)) - .map(([key, nestedValue]) => [key, normalizeForCache(nestedValue)] as const) - return Object.fromEntries(normalizedEntries) -} - -function stableSerialize(value: unknown): string { - return JSON.stringify(normalizeForCache(value)) -} - -function buildArtifactCacheKey( - filePath: string, - stamp: number, - options: ReadPromptArtifactOptions -): string { - return [ - path.resolve(filePath), - stamp, - options.mode, - stableSerialize(options.globalScope ?? {}) - ].join('::') -} - -function buildRawCompilationCacheKey( - options: CompileRawPromptArtifactOptions -): string { - return [ - path.resolve(options.filePath), - options.cacheMtimeMs ?? options.rawMdx.length, - stableSerialize(options.globalScope ?? {}), - stableSerialize(options.rawMdx) - ].join('::') -} - -function trimMetadataPrefix(content: string): string { - return content.replace(/^\s*;?\s*/u, '').trim() -} - -function isRecord(value: unknown): value is Record { - return value != null && typeof value === 'object' && !Array.isArray(value) -} - -function extractObjectLiteral(source: string, startIndex: number): {value: string, endIndex: number} | null { - if (source[startIndex] !== '{') { - return null - } - - let depth = 0 - let inString: string | undefined - let escaped = false - let inLineComment = false - let inBlockComment = false - - for (let index = startIndex; index < source.length; index++) { - const current = source[index] - const next = source[index + 1] - - if (current == null) { - break - } - - if (inLineComment) { - if (current === '\n') { - inLineComment = false - } - continue - } - - if (inBlockComment) { - if (current === '*' && next === '/') { - inBlockComment = false - index++ - } - continue - } - - if (escaped) { - escaped = false - continue - } - - if (inString != null) { - if (current === '\\') { - escaped = true - continue - } - if (current === inString) { - inString = void 0 - } - continue - } - - if (current === '"' || current === '\'' || current === '`') { - inString = current - continue - } - - if (current === '/' && next === '/') { - inLineComment = true - index++ - continue - } - - if (current === '/' && next === '*') { - inBlockComment = true - index++ - continue - } - - if (current === '{') { - depth++ - continue - } - - if (current !== '}') { - continue - } - - depth-- - if (depth === 0) { - return { - value: source.slice(startIndex, index + 1), - endIndex: index + 1 - } - } - } - - return null -} - -function tryReadFastDistArtifact( - rawMdx: string -): {content: string, metadata: Record} | null { - const trimmed = rawMdx.trimStart() - - // Frontmatter and plain markdown dist prompts still need mdxToMd because the body - // may contain unresolved MDX expressions or components. - const prefixMatch = EXPORT_DEFAULT_PREFIX_PATTERN.exec(trimmed) - if (prefixMatch == null) return null - - const objectStartIndex = prefixMatch[0].length - const objectLiteral = extractObjectLiteral(trimmed, objectStartIndex) - if (objectLiteral == null) { - return null - } - - let metadata: unknown - try { - metadata = JSON5.parse(objectLiteral.value) - } - catch { - return null - } - - if (!isRecord(metadata)) { - return null - } - - return { - content: trimMetadataPrefix(trimmed.slice(objectLiteral.endIndex)), - metadata - } -} - -async function buildPromptArtifact( - filePath: string, - options: ReadPromptArtifactOptions -): Promise { - const rawMdx = options.rawMdx ?? fs.readFileSync(filePath, 'utf8') - const lastModified = options.lastModified ?? fs.statSync(filePath).mtime - const parsed = parseMarkdown(rawMdx) - - if (options.mode === 'dist') { - const fastDistArtifact = tryReadFastDistArtifact(rawMdx) - if (fastDistArtifact != null) { - return { - rawMdx, - parsed, - content: fastDistArtifact.content, - metadata: fastDistArtifact.metadata, - lastModified - } - } - } - - const compileResult = await mdxToMd(rawMdx, { - globalScope: options.globalScope, - extractMetadata: true, - basePath: path.dirname(filePath), - filePath - }) - - return { - rawMdx, - parsed, - content: compileResult.content, - metadata: compileResult.metadata.fields, - lastModified - } -} - -export async function readPromptArtifact( - filePath: string, - options: ReadPromptArtifactOptions -): Promise { - const lastModified = options.lastModified ?? fs.statSync(filePath).mtime - const stamp = lastModified.getTime() - const cacheKey = buildArtifactCacheKey(filePath, stamp, options) - const cached = promptArtifactCache.get(cacheKey) - if (cached != null) { - return (await cached).artifact - } - - const pendingArtifact = buildPromptArtifact(filePath, { - ...options, - lastModified - }).then(artifact => ({ - artifact, - stamp - })) - promptArtifactCache.set(cacheKey, pendingArtifact) - - try { - return (await pendingArtifact).artifact - } - catch (error) { - promptArtifactCache.delete(cacheKey) - throw error - } -} - -export async function compileRawPromptArtifact( - options: CompileRawPromptArtifactOptions -): Promise { - const cacheKey = buildRawCompilationCacheKey(options) - const cached = rawPromptCompilationCache.get(cacheKey) - if (cached != null) { - return cached - } - - const pendingCompilation = mdxToMd(options.rawMdx, { - globalScope: options.globalScope, - extractMetadata: true, - basePath: path.dirname(options.filePath), - filePath: options.filePath - }).then(result => ({ - content: result.content, - metadata: result.metadata.fields - })) - rawPromptCompilationCache.set(cacheKey, pendingCompilation) - - try { - return await pendingCompilation - } - catch (error) { - rawPromptCompilationCache.delete(cacheKey) - throw error - } -} - -export function clearPromptArtifactCache(): void { - promptArtifactCache.clear() - rawPromptCompilationCache.clear() -} diff --git a/cli/src/plugins/plugin-core/PromptCompilerDiagnostics.test.ts b/cli/src/plugins/plugin-core/PromptCompilerDiagnostics.test.ts deleted file mode 100644 index f4989728..00000000 --- a/cli/src/plugins/plugin-core/PromptCompilerDiagnostics.test.ts +++ /dev/null @@ -1,47 +0,0 @@ -import * as path from 'node:path' -import {UndefinedNamespaceError} from '@truenine/md-compiler/errors' -import {describe, expect, it} from 'vitest' -import { - formatPromptCompilerDiagnostic, - resolveSourcePathForDistFile -} from './PromptCompilerDiagnostics' - -describe('prompt compiler diagnostics', () => { - it('formats prompt-aware compiler diagnostics with dist and src paths', () => { - const error = new UndefinedNamespaceError('TODO', 'TODO', { - filePath: path.join('C:', 'repo', 'aindex', 'dist', 'skills', 'demo', 'examples', 'guide.mdx'), - sourceText: 'never leave placeholders or "{TODO}" markers', - position: { - start: {line: 1, column: 30, offset: 29}, - end: {line: 1, column: 36, offset: 35} - }, - nodeType: 'mdxTextExpression' - }) - - const message = formatPromptCompilerDiagnostic(error, { - operation: 'Failed to compile skill child doc.', - promptKind: 'skill-child-doc', - logicalName: 'demo/examples/guide', - entryDistPath: path.join('C:', 'repo', 'aindex', 'dist', 'skills', 'demo', 'skill.mdx'), - distPath: path.join('C:', 'repo', 'aindex', 'dist', 'skills', 'demo', 'examples', 'guide.mdx'), - srcPath: path.join('C:', 'repo', 'aindex', 'skills', 'demo', 'examples', 'guide.src.mdx') - }) - - expect(message).toContain('prompt kind: skill-child-doc') - expect(message).toContain('logical name: demo/examples/guide') - expect(message).toContain('entry dist file:') - expect(message).toContain('dist file:') - expect(message).toContain('src file:') - expect(message).toContain('location: 1:30-1:36') - expect(message).toContain('source line: never leave placeholders or "{TODO}" markers') - }) - - it('maps nested dist child docs back to src child docs', () => { - const mapped = resolveSourcePathForDistFile(path, path.join('C:', 'repo', 'aindex', 'dist', 'skills', 'demo', 'examples', 'guide.mdx'), { - distRootDir: path.join('C:', 'repo', 'aindex', 'dist', 'skills', 'demo'), - srcRootDir: path.join('C:', 'repo', 'aindex', 'skills', 'demo') - }) - - expect(mapped).toBe(path.join('C:', 'repo', 'aindex', 'skills', 'demo', 'examples', 'guide.src.mdx')) - }) -}) diff --git a/cli/src/plugins/plugin-core/PromptCompilerDiagnostics.ts b/cli/src/plugins/plugin-core/PromptCompilerDiagnostics.ts deleted file mode 100644 index e36ed182..00000000 --- a/cli/src/plugins/plugin-core/PromptCompilerDiagnostics.ts +++ /dev/null @@ -1,65 +0,0 @@ -import {CompilerDiagnosticError, formatCompilerDiagnostic} from '@truenine/md-compiler/errors' - -export interface PromptCompilerDiagnosticContext { - readonly promptKind: string - readonly logicalName: string - readonly distPath?: string | undefined - readonly entryDistPath?: string | undefined - readonly srcPath?: string | undefined - readonly operation?: string | undefined -} - -export interface SourceMappingOptions { - readonly preferredSourcePath?: string | undefined - readonly distRootDir?: string | undefined - readonly srcRootDir?: string | undefined -} - -export function resolveSourcePathForDistFile( - path: typeof import('node:path'), - distFilePath: string | undefined, - options: SourceMappingOptions = {} -): string | undefined { - const {preferredSourcePath, distRootDir, srcRootDir} = options - if (distFilePath == null || distFilePath.length === 0) return preferredSourcePath - if (preferredSourcePath != null && preferredSourcePath.length > 0) return preferredSourcePath - if (distRootDir == null || srcRootDir == null) return void 0 - - const relativePath = path.relative(distRootDir, distFilePath) - if (relativePath.startsWith('..') || path.isAbsolute(relativePath)) return void 0 - - return path.join(srcRootDir, relativePath.replace(/\.mdx$/u, '.src.mdx')) -} - -export function getDiagnosticFilePath(error: unknown): string | undefined { - if (error instanceof CompilerDiagnosticError && error.filePath != null) return error.filePath - if (!(error instanceof Error) || !('filePath' in error)) return void 0 - - const {filePath} = error as Error & {filePath?: unknown} - if (typeof filePath === 'string' && filePath.length > 0) return filePath - return void 0 -} - -export function formatPromptCompilerDiagnostic( - error: unknown, - context: PromptCompilerDiagnosticContext -): string { - const diagnosticFilePath = getDiagnosticFilePath(error) - const distPath = diagnosticFilePath ?? context.distPath - const lines = [ - context.operation ?? 'Prompt compilation failed.', - `prompt kind: ${context.promptKind}`, - `logical name: ${context.logicalName}` - ] - - if (context.entryDistPath != null && context.entryDistPath.length > 0 && context.entryDistPath !== distPath) { - lines.push(`entry dist file: ${context.entryDistPath}`) - } - - if (distPath != null && distPath.length > 0) lines.push(`dist file: ${distPath}`) - lines.push(`src file: ${context.srcPath ?? ''}`) - lines.push('diagnostic:') - lines.push(error instanceof Error ? formatCompilerDiagnostic(error) : String(error)) - - return lines.join('\n') -} diff --git a/cli/src/plugins/plugin-core/PromptIdentity.ts b/cli/src/plugins/plugin-core/PromptIdentity.ts deleted file mode 100644 index f7ccf0ee..00000000 --- a/cli/src/plugins/plugin-core/PromptIdentity.ts +++ /dev/null @@ -1,59 +0,0 @@ -import type {SkillPrompt, SubAgentPrompt} from './types' - -function normalizePromptPath(value: string): string { - return value - .replaceAll('\\', '/') - .replaceAll(/^\/+|\/+$/gu, '') -} - -export function flattenPromptPath(value: string): string { - const normalized = normalizePromptPath(value) - if (normalized.length === 0) return '' - - return normalized - .split('/') - .filter(segment => segment.length > 0) - .join('-') -} - -export function deriveSubAgentIdentity(relativeName: string): { - readonly agentPrefix?: string - readonly agentName: string - readonly canonicalName: string -} { - const normalizedName = normalizePromptPath(relativeName) - const segments = normalizedName - .split('/') - .filter(segment => segment.length > 0) - - const agentName = segments.at(-1) ?? normalizedName - const prefixSegments = segments.slice(0, -1) - const canonicalName = flattenPromptPath(normalizedName) - const agentPrefix = prefixSegments.length > 0 - ? prefixSegments.join('-') - : void 0 - - return { - ...agentPrefix != null && {agentPrefix}, - agentName, - canonicalName: canonicalName.length > 0 ? canonicalName : agentName - } -} - -export function resolveSkillName(skill: Pick): string { - return skill.skillName.trim().length > 0 - ? skill.skillName - : skill.dir.getDirectoryName() -} - -export function resolveSubAgentCanonicalName( - subAgent: Pick -): string { - if (subAgent.canonicalName.trim().length > 0) return subAgent.canonicalName - - const fallback = subAgent.agentPrefix != null && subAgent.agentPrefix.length > 0 - ? `${subAgent.agentPrefix}-${subAgent.agentName}` - : subAgent.agentName - - return flattenPromptPath(fallback) -} diff --git a/cli/src/plugins/plugin-core/PromptTypes.ts b/cli/src/plugins/plugin-core/PromptTypes.ts deleted file mode 100644 index 03de03ff..00000000 --- a/cli/src/plugins/plugin-core/PromptTypes.ts +++ /dev/null @@ -1,184 +0,0 @@ -import type {Root, RootContent} from '@truenine/md-compiler' -import type {ClaudeCodeCLISubAgentColors, CodingAgentTools, FilePathKind, NamingCaseKind, PromptKind, RuleScope} from './enums' -import type {GlobalConfigDirectory} from './OutputTypes' - -/** Common directory representation */ -export interface Path { - readonly pathKind: K - readonly path: string - readonly getDirectoryName: () => string -} - -/** Relative path directory */ -export interface RelativePath extends Path { - readonly basePath: string - getAbsolutePath: () => string -} - -/** Absolute path directory */ -export type AbsolutePath = Path - -/** Root path directory */ -export type RootPath = Path - -export interface FileContent< - C = unknown, - FK extends FilePathKind = FilePathKind.Relative, - F extends Path = RelativePath -> { - content: C - length: number - filePathKind: FK - dir: F - charsetEncoding?: BufferEncoding -} - -/** - * Prompt - */ -export interface Prompt< - T extends PromptKind = PromptKind, - Y extends YAMLFrontMatter = YAMLFrontMatter, - DK extends FilePathKind = FilePathKind.Relative, - D extends Path = RelativePath, - C = unknown -> extends FileContent { - readonly type: T - readonly yamlFrontMatter?: Y - readonly rawFrontMatter?: string - readonly markdownAst?: Root - readonly markdownContents: readonly RootContent[] - readonly dir: D -} - -export interface YAMLFrontMatter extends Record { - readonly namingCase: N -} - -/** - * Series name type - used across multiple prompt types - * Extracted to avoid repetition and ensure consistency - */ -export type SeriName = string | string[] | null - -export interface CommonYAMLFrontMatter extends YAMLFrontMatter { - readonly description: string -} - -export interface ToolAwareYAMLFrontMatter extends CommonYAMLFrontMatter { - readonly allowTools?: (CodingAgentTools | string)[] - readonly argumentHint?: string -} - -/** - * Memory prompt working on project root directory - */ -export interface ProjectRootMemoryPrompt extends Prompt< - PromptKind.ProjectRootMemory, - YAMLFrontMatter, - FilePathKind.Relative, - RootPath -> { - readonly type: PromptKind.ProjectRootMemory -} - -/** - * Memory prompt working on project subdirectory - */ -export interface ProjectChildrenMemoryPrompt extends Prompt { - readonly type: PromptKind.ProjectChildrenMemory - readonly workingChildDirectoryPath: RelativePath -} - -export interface SubAgentYAMLFrontMatter extends ToolAwareYAMLFrontMatter { - readonly model?: string - readonly color?: ClaudeCodeCLISubAgentColors | string - readonly seriName?: SeriName - readonly scope?: RuleScope -} - -export interface CommandYAMLFrontMatter extends ToolAwareYAMLFrontMatter { - readonly seriName?: SeriName - readonly scope?: RuleScope -} // description, argumentHint, allowTools inherited from ToolAwareYAMLFrontMatter - -/** - * Base YAML front matter for all skill types - */ -export interface SkillsYAMLFrontMatter extends CommonYAMLFrontMatter { - readonly name?: string -} - -export interface SkillYAMLFrontMatter extends SkillsYAMLFrontMatter { - readonly allowTools?: (CodingAgentTools | string)[] - readonly keywords?: readonly string[] - readonly displayName?: string - readonly author?: string - readonly version?: string - readonly seriName?: SeriName - readonly scope?: RuleScope -} - -/** - * Codex skill metadata field - * Follows Agent Skills specification: https://agentskills.io/specification - * - * The metadata field is an arbitrary key-value mapping for additional metadata. - * Common fields include displayName, version, author, keywords, etc. - */ -export interface CodexSkillMetadata { - readonly 'short-description'?: string - readonly 'displayName'?: string - readonly 'version'?: string - readonly 'author'?: string - readonly 'keywords'?: readonly string[] - readonly 'category'?: string - readonly 'repository'?: string - readonly [key: string]: unknown -} - -export interface CodexSkillYAMLFrontMatter extends SkillsYAMLFrontMatter { - readonly 'license'?: string - readonly 'compatibility'?: string - readonly 'metadata'?: CodexSkillMetadata - readonly 'allowed-tools'?: string -} - -/** - * Kiro steering file front matter - * @see https://kiro.dev/docs/steering - */ -export interface KiroSteeringYAMLFrontMatter extends YAMLFrontMatter { - readonly inclusion?: 'always' | 'fileMatch' | 'manual' - readonly fileMatchPattern?: string -} - -/** - * Kiro Power POWER.md front matter - * @see https://kiro.dev/docs/powers - */ -export interface KiroPowerYAMLFrontMatter extends SkillsYAMLFrontMatter { - readonly displayName?: string - readonly keywords?: readonly string[] - readonly author?: string -} - -/** - * Rule YAML front matter with glob patterns and scope - */ -export interface RuleYAMLFrontMatter extends CommonYAMLFrontMatter { - readonly globs: readonly string[] - readonly scope?: RuleScope - readonly seriName?: SeriName -} - -/** - * Global memory prompt - * Single output target - */ -export interface GlobalMemoryPrompt extends Prompt< - PromptKind.GlobalMemory -> { - readonly type: PromptKind.GlobalMemory - readonly parentDirectoryPath: GlobalConfigDirectory -} diff --git a/cli/src/plugins/plugin-core/RegistryWriter.ts b/cli/src/plugins/plugin-core/RegistryWriter.ts deleted file mode 100644 index 4e74cd69..00000000 --- a/cli/src/plugins/plugin-core/RegistryWriter.ts +++ /dev/null @@ -1,179 +0,0 @@ -/** - * Registry Configuration Writer - * - * Abstract base class for registry configuration writers. - * Provides common functionality for reading, writing, and merging JSON registry files. - * - * @see Requirements 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 7.1, 7.2 - */ - -import type {ILogger, RegistryData, RegistryOperationResult} from './types' - -import * as fs from 'node:fs' -import * as path from 'node:path' -import {createLogger} from '@truenine/logger' -import { - buildDiagnostic, - buildFileOperationDiagnostic, - diagnosticLines -} from '@/diagnostics' -import {resolveUserPath} from '@/runtime-environment' - -/** - * Abstract base class for registry configuration writers. - * Provides common functionality for reading, writing, and merging JSON registry files. - * - * @template TEntry - The type of entries stored in the registry - * @template TRegistry - The full registry data structure type - * - * @see Requirements 1.1, 1.2, 1.3, 1.7 - */ -export abstract class RegistryWriter< - TEntry, - TRegistry extends RegistryData = RegistryData -> { - protected readonly registryPath: string - - protected readonly log: ILogger - - protected constructor(registryPath: string, logger?: ILogger) { - this.registryPath = this.resolvePath(registryPath) - this.log = logger ?? createLogger(this.constructor.name) - } - - protected resolvePath(p: string): string { - if (p.startsWith('~')) return resolveUserPath(p) - return path.resolve(p) - } - - protected getRegistryDir(): string { - return path.dirname(this.registryPath) - } - - protected ensureRegistryDir(): void { - const dir = this.getRegistryDir() - if (!fs.existsSync(dir)) fs.mkdirSync(dir, {recursive: true}) - } - - read(): TRegistry { - if (!fs.existsSync(this.registryPath)) { - this.log.debug('registry not found', {path: this.registryPath}) - return this.createInitialRegistry() - } - - try { - const content = fs.readFileSync(this.registryPath, 'utf8') - return JSON.parse(content) as TRegistry - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error(buildFileOperationDiagnostic({ - code: 'REGISTRY_READ_FAILED', - title: 'Failed to read registry file', - operation: 'read', - targetKind: 'registry file', - path: this.registryPath, - error: errMsg - })) - return this.createInitialRegistry() - } - } - - protected write(data: TRegistry, dryRun?: boolean): boolean { - const updatedData = { // Update lastUpdated timestamp - ...data, - lastUpdated: new Date().toISOString() - } as TRegistry - - if (dryRun === true) { - this.log.trace({action: 'dryRun', type: 'registry', path: this.registryPath}) - return true - } - - const tempPath = `${this.registryPath}.tmp.${Date.now()}` - - try { - this.ensureRegistryDir() - - const content = JSON.stringify(updatedData, null, 2) // Write to temporary file first - fs.writeFileSync(tempPath, content, 'utf8') - - fs.renameSync(tempPath, this.registryPath) // Atomic rename to replace target - - this.log.trace({action: 'write', type: 'registry', path: this.registryPath}) - return true - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error(buildFileOperationDiagnostic({ - code: 'REGISTRY_WRITE_FAILED', - title: 'Failed to write registry file', - operation: 'write', - targetKind: 'registry file', - path: this.registryPath, - error: errMsg - })) - - try { // Cleanup temp file if it exists - if (fs.existsSync(tempPath)) fs.unlinkSync(tempPath) - } - catch { - } // Ignore cleanup errors - - return false - } - } - - register( - entries: readonly TEntry[], - dryRun?: boolean - ): readonly RegistryOperationResult[] { - const results: RegistryOperationResult[] = [] - - const existing = this.read() // Read existing registry - - const merged = this.merge(existing, entries) // Merge new entries - - const writeSuccess = this.write(merged, dryRun) // Write updated registry - - for (const entry of entries) { // Build results for each entry - const entryName = this.getEntryName(entry) - if (writeSuccess) { - results.push({success: true, entryName}) - if (dryRun === true) this.log.trace({action: 'dryRun', type: 'registerEntry', entryName}) - else this.log.trace({action: 'register', type: 'entry', entryName}) - } else { - results.push({success: false, entryName, error: new Error(`Failed to write registry file`)}) - this.log.error(buildDiagnostic({ - code: 'REGISTRY_ENTRY_REGISTRATION_FAILED', - title: `Failed to register registry entry: ${entryName}`, - rootCause: diagnosticLines( - `tnmsc could not persist the registry entry "${entryName}" because the registry write step failed.` - ), - exactFix: diagnosticLines( - 'Fix the registry path permissions or invalid on-disk state, then rerun tnmsc.' - ), - details: { - entryName, - registryPath: this.registryPath - } - })) - } - } - - return results - } - - protected generateEntryId(prefix?: string): string { - const timestamp = Date.now() - const random = Math.random().toString(36).slice(2, 8) - const id = `${timestamp}-${random}` - return prefix != null ? `${prefix}-${id}` : id - } - - protected abstract getEntryName(entry: TEntry): string - - protected abstract merge(existing: TRegistry, entries: readonly TEntry[]): TRegistry - - protected abstract createInitialRegistry(): TRegistry -} diff --git a/cli/src/plugins/plugin-core/constants.ts b/cli/src/plugins/plugin-core/constants.ts deleted file mode 100644 index 63078971..00000000 --- a/cli/src/plugins/plugin-core/constants.ts +++ /dev/null @@ -1,113 +0,0 @@ -import type {UserConfigFile} from './ConfigTypes.schema' - -export const PathPlaceholders = { - USER_HOME: '~', - WORKSPACE: '$WORKSPACE' -} as const - -type DefaultUserConfig = Readonly>> -export const DEFAULT_USER_CONFIG = {} as DefaultUserConfig - -export const PLUGIN_NAMES = { - AgentsOutput: 'AgentsOutputPlugin', - GeminiCLIOutput: 'GeminiCLIOutputPlugin', - CursorOutput: 'CursorOutputPlugin', - WindsurfOutput: 'WindsurfOutputPlugin', - ClaudeCodeCLIOutput: 'ClaudeCodeCLIOutputPlugin', - KiroIDEOutput: 'KiroCLIOutputPlugin', - OpencodeCLIOutput: 'OpencodeCLIOutputPlugin', - OpenAICodexCLIOutput: 'CodexCLIOutputPlugin', - DroidCLIOutput: 'DroidCLIOutputPlugin', - WarpIDEOutput: 'WarpIDEOutputPlugin', - TraeIDEOutput: 'TraeIDEOutputPlugin', - TraeCNIDEOutput: 'TraeCNIDEOutputPlugin', - QoderIDEOutput: 'QoderIDEPluginOutputPlugin', - JetBrainsCodeStyleOutput: 'JetBrainsIDECodeStyleConfigOutputPlugin', - JetBrainsAICodexOutput: 'JetBrainsAIAssistantCodexOutputPlugin', - AgentSkillsCompactOutput: 'GenericSkillsOutputPlugin', - GitExcludeOutput: 'GitExcludeOutputPlugin', - ReadmeOutput: 'ReadmeMdConfigFileOutputPlugin', - VSCodeOutput: 'VisualStudioCodeIDEConfigOutputPlugin', - ZedOutput: 'ZedIDEConfigOutputPlugin', - EditorConfigOutput: 'EditorConfigOutputPlugin', - AntigravityOutput: 'AntigravityOutputPlugin' -} as const - -export type PluginName = (typeof PLUGIN_NAMES)[keyof typeof PLUGIN_NAMES] - -export const WORKSPACE_ROOT_PROJECT_NAME = '__workspace__' - -/** - * Constants for output plugins. - */ -export const OutputFileNames = { - SKILL: 'SKILL.md', - CURSOR_GLOBAL_RULE: 'global.mdc', - CURSOR_PROJECT_RULE: 'always.md', - MCP_CONFIG: 'mcp.json', - CLAUDE_MEMORY: 'CLAUDE.md', - WINDSURF_GLOBAL_RULE: 'global_rules.md' -} as const - -export const OutputPrefixes = { - RULE: 'rule-', - CHILD_RULE: 'glob-' -} as const - -export const OutputSubdirectories = { - RULES: 'rules', - COMMANDS: 'commands', - SKILLS: 'skills', - AGENTS: 'agents', - CURSOR_SKILLS: 'skills-cursor' -} as const - -export const FrontMatterFields = { - ALWAYS_APPLY: 'alwaysApply', - GLOBS: 'globs', - DESCRIPTION: 'description', - NAME: 'name', - TRIGGER: 'trigger' -} as const - -export const FileExtensions = { - MD: '.md', - MDC: '.mdc', - MDX: '.mdx', - JSON: '.json' -} as const - -export const SourcePromptExtensions = { - PRIMARY: '.src.mdx' -} as const - -export const SourcePromptFileExtensions = [SourcePromptExtensions.PRIMARY] as const - -export const SourceLocaleExtensions = { - zh: SourcePromptFileExtensions, - en: FileExtensions.MDX -} as const - -export function hasSourcePromptExtension(fileName: string): boolean { - return SourcePromptFileExtensions.some(extension => fileName.endsWith(extension)) -} - -export const GlobalConfigDirs = { - CURSOR: '.cursor', - CLAUDE: '.claude', - WINDSURF: '.codeium/windsurf', - WINDSURF_RULES: '.windsurf' -} as const - -export const IgnoreFiles = { - CURSOR: '.cursorignore', - WINDSURF: '.codeiumignore' -} as const - -export const PreservedSkills = { - CURSOR: new Set(['create-rule', 'create-skill', 'create-subagent', 'migrate-to-skills', 'update-cursor-settings']) -} as const - -export const ToolPresets = { - CLAUDE_CODE: 'claudeCode' -} as const diff --git a/cli/src/plugins/plugin-core/enums.ts b/cli/src/plugins/plugin-core/enums.ts deleted file mode 100644 index d146d369..00000000 --- a/cli/src/plugins/plugin-core/enums.ts +++ /dev/null @@ -1,53 +0,0 @@ -export enum PluginKind { - Output = 'output' -} - -export enum PromptKind { - GlobalMemory = 'globalMemory', - ProjectRootMemory = 'projectRootMemory', - ProjectChildrenMemory = 'projectChildrenMemory', - Command = 'command', - SubAgent = 'subAgent', - Skill = 'skill', - SkillChildDoc = 'skillChildDoc', - SkillResource = 'skillResource', - SkillMcpConfig = 'skillMcpConfig', - Readme = 'readme', - Rule = 'rule' -} - -export type RuleScope = 'project' | 'global' - -export enum FilePathKind { - Relative = 'relative', - Absolute = 'absolute', - Root = 'root' -} - -export enum IDEKind { - VSCode = 'vscode', - Zed = 'zed', - IntellijIDEA = 'intellijIdea', - Git = 'git', - EditorConfig = 'editorconfig', - Original = 'original' -} - -export enum NamingCaseKind { - CamelCase = 'camelCase', - PascalCase = 'pascalCase', - SnakeCase = 'snakeCase', - KebabCase = 'kebabCase', - UpperCase = 'upperCase', - LowerCase = 'lowerCase', - Original = 'original' -} - -export enum GlobalConfigDirectoryType { - UserHome = 'userHome', - External = 'external' -} - -export type CodingAgentTools = string - -export type ClaudeCodeCLISubAgentColors = string diff --git a/cli/src/plugins/plugin-core/filters.ts b/cli/src/plugins/plugin-core/filters.ts deleted file mode 100644 index 0cec8122..00000000 --- a/cli/src/plugins/plugin-core/filters.ts +++ /dev/null @@ -1,261 +0,0 @@ -import type {ProjectConfig, RulePrompt, SeriName} from './types' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {getNativeBinding} from '@/core/native-binding' - -interface SeriesFilterFns { - readonly resolveEffectiveIncludeSeries: (topLevel?: readonly string[], typeSpecific?: readonly string[]) => string[] - readonly matchesSeries: (seriName: string | readonly string[] | null | undefined, effectiveIncludeSeries: readonly string[]) => boolean - readonly resolveSubSeries: ( - topLevel?: Readonly>, - typeSpecific?: Readonly> - ) => Record -} - -let seriesFilterFnsCache: SeriesFilterFns | undefined - -function getSeriesFilterFns(): SeriesFilterFns { - if (seriesFilterFnsCache != null) return seriesFilterFnsCache - - const candidate = getNativeBinding() - if (candidate == null) { - throw new TypeError('Native series-filter binding is required. Build or install the Rust NAPI package before running tnmsc.') - } - if ( - typeof candidate.matchesSeries !== 'function' - || typeof candidate.resolveEffectiveIncludeSeries !== 'function' - || typeof candidate.resolveSubSeries !== 'function' - ) { - throw new TypeError('Native series-filter binding is incomplete. Rebuild the Rust NAPI package before running tnmsc.') - } - seriesFilterFnsCache = candidate - return candidate -} - -function resolveEffectiveIncludeSeries(topLevel?: readonly string[], typeSpecific?: readonly string[]): string[] { - return getSeriesFilterFns().resolveEffectiveIncludeSeries(topLevel, typeSpecific) -} - -function matchesSeries(seriName: string | readonly string[] | null | undefined, effectiveIncludeSeries: readonly string[]): boolean { - return getSeriesFilterFns().matchesSeries(seriName, effectiveIncludeSeries) -} - -function resolveSubSeries( - topLevel?: Readonly>, - typeSpecific?: Readonly> -): Record { - return getSeriesFilterFns().resolveSubSeries(topLevel, typeSpecific) -} - -/** - * Interface for items that can be filtered by series name - */ -export interface SeriesFilterable { - readonly seriName?: SeriName -} - -/** - * Configuration path types for project config lookup - */ -export type FilterConfigPath = 'commands' | 'skills' | 'subAgents' | 'rules' - -export function filterByProjectConfig( - items: readonly T[], - projectConfig: ProjectConfig | undefined, - configPath: FilterConfigPath -): readonly T[] { - const effectiveSeries = resolveEffectiveIncludeSeries(projectConfig?.includeSeries, projectConfig?.[configPath]?.includeSeries) - return items.filter(item => matchesSeries(item.seriName, effectiveSeries)) -} - -export function normalizeSubdirPath(subdir: string): string { - let normalized = subdir.replaceAll(/\.\/+/g, '') - normalized = normalized.replaceAll(/\/+$/g, '') - return normalized -} - -function smartConcatGlob(prefix: string, glob: string): string { - if (glob.startsWith('**/')) return `${prefix}/${glob}` - if (glob.startsWith('*')) return `${prefix}/**/${glob}` - return `${prefix}/${glob}` -} - -function extractPrefixAndBaseGlob(glob: string, prefixes: readonly string[]): {prefix: string | null, baseGlob: string} { - for (const prefix of prefixes) { - const normalizedPrefix = prefix.replaceAll(/\/+$/g, '') - const patterns = [ - {prefix: normalizedPrefix, pattern: `${normalizedPrefix}/`}, - {prefix: normalizedPrefix, pattern: `${normalizedPrefix}\\`} - ] - for (const {prefix: p, pattern} of patterns) { - if (glob.startsWith(pattern)) return {prefix: p, baseGlob: glob.slice(pattern.length)} - } - if (glob === normalizedPrefix) return {prefix: normalizedPrefix, baseGlob: '**/*'} - } - return {prefix: null, baseGlob: glob} -} - -export function applySubSeriesGlobPrefix(rules: readonly RulePrompt[], projectConfig: ProjectConfig | undefined): readonly RulePrompt[] { - const subSeries = resolveSubSeries(projectConfig?.subSeries, projectConfig?.rules?.subSeries) - if (Object.keys(subSeries).length === 0) return rules - - const normalizedSubSeries: Record = {} - for (const [subdir, seriNames] of Object.entries(subSeries)) { - const normalizedSubdir = normalizeSubdirPath(subdir) - normalizedSubSeries[normalizedSubdir] = seriNames - } - - const allPrefixes = Object.keys(normalizedSubSeries) - - return rules.map(rule => { - if (rule.seriName == null) return rule - - const matchedPrefixes: string[] = [] - for (const [subdir, seriNames] of Object.entries(normalizedSubSeries)) { - const matched = Array.isArray(rule.seriName) ? rule.seriName.some(name => seriNames.includes(name)) : seriNames.includes(rule.seriName) - if (matched) matchedPrefixes.push(subdir) - } - - if (matchedPrefixes.length === 0) return rule - - const newGlobs: string[] = [] - for (const originalGlob of rule.globs) { - const {prefix: existingPrefix, baseGlob} = extractPrefixAndBaseGlob(originalGlob, allPrefixes) - - if (existingPrefix != null) newGlobs.push(originalGlob) - - for (const prefix of matchedPrefixes) { - if (prefix === existingPrefix) continue - const newGlob = smartConcatGlob(prefix, baseGlob) - if (!newGlobs.includes(newGlob)) newGlobs.push(newGlob) - } - } - - return { - ...rule, - globs: newGlobs - } - }) -} - -/** - * Resolves the actual `.git/info` directory for a given project path. - * Handles both regular git repos (`.git` is a directory) and submodules/worktrees (`.git` is a file with `gitdir:` pointer). - * Returns `null` if no valid git info directory can be resolved. - */ -export function resolveGitInfoDir(projectDir: string): string | null { - const dotGitPath = path.join(projectDir, '.git') - - if (!fs.existsSync(dotGitPath)) return null - - const stat = fs.lstatSync(dotGitPath) - - if (stat.isDirectory()) { - const infoDir = path.join(dotGitPath, 'info') - return infoDir - } - - if (stat.isFile()) { - try { - const content = fs.readFileSync(dotGitPath, 'utf8').trim() - const match = /^gitdir: (.+)$/.exec(content) - if (match?.[1] != null) { - const gitdir = path.resolve(projectDir, match[1]) - return path.join(gitdir, 'info') - } - } catch {} // ignore read errors - } - - return null -} - -/** - * Recursively discovers all `.git` entries (directories or files) under a given root, - * skipping common non-source directories. - * Returns absolute paths of directories containing a `.git` entry. - */ -export function findAllGitRepos(rootDir: string, maxDepth = 5): string[] { - const results: string[] = [] - const SKIP_DIRS = new Set(['node_modules', '.turbo', 'dist', 'build', 'out', '.cache']) - - function walk(dir: string, depth: number): void { - if (depth > maxDepth) return - - let entries: fs.Dirent[] - try { - const raw = fs.readdirSync(dir, {withFileTypes: true}) - if (!Array.isArray(raw)) return - entries = raw - } catch { - return - } - - const hasGit = entries.some(e => e.name === '.git') - if (hasGit && dir !== rootDir) results.push(dir) - - for (const entry of entries) { - if (!entry.isDirectory()) continue - if (entry.name === '.git' || SKIP_DIRS.has(entry.name)) continue - walk(path.join(dir, entry.name), depth + 1) - } - } - - walk(rootDir, 0) - return results -} - -/** - * Scans `.git/modules/` directory recursively to find all submodule `info/` dirs. - * Handles nested submodules (modules within modules). - * Returns absolute paths of `info/` directories. - */ -export function findGitModuleInfoDirs(dotGitDir: string): string[] { - const modulesDir = path.join(dotGitDir, 'modules') - if (!fs.existsSync(modulesDir)) return [] - - const results: string[] = [] - - function walk(dir: string): void { - let entries: fs.Dirent[] - try { - const raw = fs.readdirSync(dir, {withFileTypes: true}) - if (!Array.isArray(raw)) return - entries = raw - } catch { - return - } - - const hasInfo = entries.some(e => e.name === 'info' && e.isDirectory()) - if (hasInfo) results.push(path.join(dir, 'info')) - - const nestedModules = entries.find(e => e.name === 'modules' && e.isDirectory()) - if (nestedModules == null) return - - let subEntries: fs.Dirent[] - try { - const raw = fs.readdirSync(path.join(dir, 'modules'), {withFileTypes: true}) - if (!Array.isArray(raw)) return - subEntries = raw - } catch { - return - } - for (const sub of subEntries) { - if (sub.isDirectory()) walk(path.join(dir, 'modules', sub.name)) - } - } - - let topEntries: fs.Dirent[] - try { - const raw = fs.readdirSync(modulesDir, {withFileTypes: true}) - if (!Array.isArray(raw)) return results - topEntries = raw - } catch { - return results - } - - for (const entry of topEntries) { - if (entry.isDirectory()) walk(path.join(modulesDir, entry.name)) - } - - return results -} diff --git a/cli/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts b/cli/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts deleted file mode 100644 index 5b002ab5..00000000 --- a/cli/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts +++ /dev/null @@ -1,182 +0,0 @@ -import type {ILogger} from '@truenine/logger' -import type {OutputPlugin, OutputWriteContext} from './plugin' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {FilePathKind, PluginKind} from './enums' -import { - collectAllPluginOutputs, - executeDeclarativeWriteOutputs, - validateOutputScopeOverridesForPlugins -} from './plugin' - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createMockWriteContext(pluginName: string, topicOverride: Record): OutputWriteContext { - return { - logger: createMockLogger(), - fs, - path, - glob: {} as never, - dryRun: true, - pluginOptions: { - outputScopes: { - plugins: { - [pluginName]: topicOverride - } - } - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Relative, - path: '.', - basePath: '.', - getDirectoryName: () => '.', - getAbsolutePath: () => path.resolve('.') - }, - projects: [] - } - } - } as OutputWriteContext -} - -function createMockOutputPlugin(name: string): OutputPlugin { - return { - type: PluginKind.Output, - name, - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: { - commands: { - scopes: ['global'], - singleScope: true - } - }, - async declareOutputFiles() { - return [] - }, - async convertContent() { - return '' - } - } -} - -function createMultiScopeOutputPlugin(name: string): OutputPlugin { - return { - type: PluginKind.Output, - name, - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: { - commands: { - scopes: ['project', 'global'], - singleScope: false - } - }, - async declareOutputFiles() { - return [] - }, - async convertContent() { - return '' - } - } -} - -function createScopedDeclarationPlugin(name: string): OutputPlugin { - return { - type: PluginKind.Output, - name, - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return [ - {path: path.resolve('tmp/project.txt'), scope: 'project', source: {}}, - {path: path.resolve('tmp/global.txt'), scope: 'global', source: {}} - ] - }, - async convertContent() { - return '' - } - } -} - -describe('outputScopes capability validation', () => { - it('accepts valid topic override', async () => { - const plugin = createMockOutputPlugin('MockOutputPlugin') - const ctx = createMockWriteContext(plugin.name, {commands: 'global'}) - - const result = await executeDeclarativeWriteOutputs([plugin], ctx) - expect(result.has(plugin.name)).toBe(true) - }) - - it('throws when override topic is unsupported by plugin capabilities', async () => { - const plugin = createMockOutputPlugin('MockOutputPlugin') - const ctx = createMockWriteContext(plugin.name, {rules: 'global'}) - - await expect(executeDeclarativeWriteOutputs([plugin], ctx)) - .rejects - .toThrow('does not support topic "rules"') - }) - - it('throws when override scope is not allowed by plugin capabilities', async () => { - const plugin = createMockOutputPlugin('MockOutputPlugin') - const ctx = createMockWriteContext(plugin.name, {commands: 'project'}) - - await expect(executeDeclarativeWriteOutputs([plugin], ctx)) - .rejects - .toThrow('requests unsupported scopes [project]') - }) - - it('applies the same validation in output collection path', async () => { - const plugin = createMockOutputPlugin('MockOutputPlugin') - const ctx = createMockWriteContext(plugin.name, {rules: 'global'}) - - await expect(collectAllPluginOutputs([plugin], ctx)) - .rejects - .toThrow('does not support topic "rules"') - }) - - it('throws for multi-scope selection on single-scope topic', () => { - const plugin = createMockOutputPlugin('MockOutputPlugin') - const ctx = createMockWriteContext(plugin.name, {commands: ['global', 'project']}) - - expect(() => validateOutputScopeOverridesForPlugins([plugin], ctx.pluginOptions)) - .toThrow('is single-scope and cannot request multiple scopes') - }) - - it('accepts multi-scope selection when the topic supports parallel scopes', () => { - const plugin = createMultiScopeOutputPlugin('MultiScopeOutputPlugin') - const ctx = createMockWriteContext(plugin.name, {commands: ['project', 'global']}) - - expect(() => validateOutputScopeOverridesForPlugins([plugin], ctx.pluginOptions)).not.toThrow() - }) - - it('rejects workspace as an unsupported override scope', () => { - const plugin = createMultiScopeOutputPlugin('MultiScopeOutputPlugin') - const ctx = createMockWriteContext(plugin.name, {commands: 'workspace'}) - - expect(() => validateOutputScopeOverridesForPlugins([plugin], ctx.pluginOptions)) - .toThrow('requests unsupported scopes [workspace]') - }) - - it('classifies project and global declarations during output collection', async () => { - const plugin = createScopedDeclarationPlugin('ScopedDeclarationPlugin') - const ctx = createMockWriteContext(plugin.name, {}) - - const outputs = await collectAllPluginOutputs([plugin], ctx) - - expect(outputs.projectFiles).toEqual([path.resolve('tmp/project.txt')]) - expect(outputs.globalFiles).toEqual([path.resolve('tmp/global.txt')]) - }) -}) diff --git a/cli/src/plugins/plugin-core/plugin.ts b/cli/src/plugins/plugin-core/plugin.ts deleted file mode 100644 index 9bed4e00..00000000 --- a/cli/src/plugins/plugin-core/plugin.ts +++ /dev/null @@ -1,541 +0,0 @@ -import type {ILogger} from '@truenine/logger' -import type {MdxGlobalScope} from '@truenine/md-compiler/globals' -import type { - AindexConfig, - CleanupProtectionOptions, - CommandSeriesOptions, - FrontMatterOptions, - OutputScopeOptions, - OutputScopeSelection, - PluginOutputScopeTopics, - ProtectionMode, - WindowsOptions -} from './ConfigTypes.schema' -import type {PluginKind} from './enums' -import type { - InputCollectedContext, - OutputCollectedContext, - Project -} from './InputTypes' -import {Buffer} from 'node:buffer' -import * as fs from 'node:fs' -import * as path from 'node:path' - -export type FastGlobType = typeof import('fast-glob') - -/** - * Opaque type for ScopeRegistry. - * Concrete implementation lives in plugin-input-shared. - */ -export interface ScopeRegistryLike { - resolve: (expression: string) => string -} - -export interface DependencyNode { - readonly name: string - readonly log: ILogger - readonly dependsOn?: readonly string[] -} - -export interface Plugin extends DependencyNode { - readonly type: T -} - -export interface PluginContext { - logger: ILogger - fs: typeof import('node:fs') - path: typeof import('node:path') - glob: FastGlobType -} - -export interface InputCapabilityContext extends PluginContext { - readonly userConfigOptions: Required - readonly dependencyContext: Partial - readonly runtimeCommand?: 'execute' | 'dry-run' | 'clean' | 'plugins' - - readonly globalScope?: MdxGlobalScope - - readonly scopeRegistry?: ScopeRegistryLike -} - -export interface InputCapability extends DependencyNode { - collect: (ctx: InputCapabilityContext) => Partial | Promise> -} - -/** - * Capability that can enhance projects after all projects are collected. - * This is useful for capabilities that need to add data to projects - * collected by earlier capabilities. - */ -export interface ProjectEnhancerCapability extends InputCapability { - enhanceProjects: (ctx: InputCapabilityContext, projects: readonly Project[]) => Project[] -} - -export interface OutputRuntimeTargets { - readonly jetbrainsCodexDirs: readonly string[] -} - -/** - * Context for output plugin operations - */ -export interface OutputPluginContext { - readonly logger: ILogger - readonly collectedOutputContext: OutputCollectedContext - readonly pluginOptions?: PluginOptions - readonly runtimeTargets: OutputRuntimeTargets -} - -/** - * Context for output cleaning operations - */ -export interface OutputCleanContext extends OutputPluginContext { - readonly dryRun?: boolean -} - -/** - * Context for output writing operations - */ -export interface OutputWriteContext extends OutputPluginContext { - readonly dryRun?: boolean - - readonly registeredPluginNames?: readonly string[] -} - -/** - * Declarative host-home file that should be mirrored into configured WSL instances. - */ -export interface WslMirrorFileDeclaration { - /** Source path on the Windows host, typically under ~ */ - readonly sourcePath: string - /** Optional label for diagnostics/logging */ - readonly label?: string -} - -/** - * Result of a single write operation - */ -export interface WriteResult { - readonly path: string - readonly success: boolean - readonly skipped?: boolean - readonly error?: Error -} - -/** - * Collected results from write operations - */ -export interface WriteResults { - readonly files: readonly WriteResult[] - readonly dirs: readonly WriteResult[] -} - -/** - * Awaitable type for sync/async flexibility - */ -export type Awaitable = T | Promise - -/** - * Result of executing an input effect. - * Used for preprocessing/cleaning input sources before collection. - */ -export interface InputEffectResult { - /** Whether the effect executed successfully */ - readonly success: boolean - /** Error details if the effect failed */ - readonly error?: Error - /** Description of what the effect did (for logging) */ - readonly description?: string - /** Files that were modified/created */ - readonly modifiedFiles?: readonly string[] - /** Files that were deleted */ - readonly deletedFiles?: readonly string[] -} - -/** - * Context provided to input effect handlers. - * Contains utilities and configuration for effect execution. - */ -export interface InputEffectContext { - /** Logger instance */ - readonly logger: ILogger - /** File system module */ - readonly fs: typeof import('node:fs') - /** Path module */ - readonly path: typeof import('node:path') - /** Glob module for file matching */ - readonly glob: FastGlobType - /** Child process spawn function */ - readonly spawn: typeof import('node:child_process').spawn - /** User configuration options */ - readonly userConfigOptions: Required - /** Resolved workspace directory */ - readonly workspaceDir: string - /** Resolved aindex directory */ - readonly aindexDir: string - /** Whether running in dry-run mode */ - readonly dryRun?: boolean -} - -/** - * Handler function for input effects. - * Receives the effect context and returns an effect result. - */ -export type InputEffectHandler = (ctx: InputEffectContext) => Awaitable - -/** - * Registration entry for an input effect. - */ -export interface InputEffectRegistration { - /** Descriptive name for logging */ - readonly name: string - /** The effect handler function */ - readonly handler: InputEffectHandler - /** Priority for execution order (lower = earlier, default: 0) */ - readonly priority?: number -} - -/** - * Result of resolving base paths from plugin options. - */ -export interface ResolvedBasePaths { - /** The resolved workspace directory path */ - readonly workspaceDir: string - /** The resolved aindex directory path */ - readonly aindexDir: string -} - -/** - * Represents a registered scope entry from a plugin. - */ -export interface PluginScopeRegistration { - /** The namespace name (e.g., 'myPlugin') */ - readonly namespace: string - /** Key-value pairs registered under this namespace */ - readonly values: Record -} - -/** - * Output plugin interface. - * Declarative write model only: - * - Plugins declare target files - * - Plugins convert source metadata to content - * - Core runtime performs all file system operations - */ -export interface OutputPlugin extends Plugin { - readonly declarativeOutput: true - readonly outputCapabilities: OutputPluginCapabilities - - declareOutputFiles: (ctx: OutputWriteContext) => Awaitable - - convertContent: (declaration: OutputFileDeclaration, ctx: OutputWriteContext) => Awaitable - - declareCleanupPaths?: (ctx: OutputCleanContext) => Awaitable - - declareWslMirrorFiles?: (ctx: OutputWriteContext) => Awaitable -} - -/** - * Scope of a declared output file target. - */ -export type OutputDeclarationScope = 'project' | 'global' - -/** - * Supported output scope override topics. - */ -export const OUTPUT_SCOPE_TOPICS = ['prompt', 'rules', 'commands', 'subagents', 'skills', 'mcp'] as const - -/** - * Topic key for output scope override and capability declarations. - */ -export type OutputScopeTopic = (typeof OUTPUT_SCOPE_TOPICS)[number] - -/** - * Capability declaration for one output topic. - * - scopes: allowed source scopes for selection/override - * - singleScope: whether the topic resolves to a single scope by priority - */ -export interface OutputTopicCapability { - readonly scopes: readonly OutputDeclarationScope[] - readonly singleScope: boolean -} - -/** - * Per-plugin capability matrix for output topics. - */ -export type OutputPluginCapabilities = Partial> - -/** - * Declarative output file declaration. - * Output plugins only declare target paths and source metadata. - * Core runtime performs all file system write operations. - */ -export interface OutputFileDeclaration { - /** Absolute target file path */ - readonly path: string - /** Target scope classification for cleanup/routing */ - readonly scope?: OutputDeclarationScope - /** Plugin-defined source descriptor for content conversion */ - readonly source: unknown - /** Optional existing-file policy */ - readonly ifExists?: 'overwrite' | 'skip' | 'error' - /** Optional label for logging */ - readonly label?: string -} - -/** - * Scope of declarative cleanup targets. - */ -export type OutputCleanupScope = OutputDeclarationScope | 'xdgConfig' - -/** - * Kind of cleanup target. - */ -export type OutputCleanupTargetKind = 'file' | 'directory' | 'glob' - -/** - * Declarative cleanup target. - */ -export interface OutputCleanupPathDeclaration { - /** Absolute path or glob pattern */ - readonly path: string - /** Target kind */ - readonly kind: OutputCleanupTargetKind - /** Optional basename exclusions when expanding delete globs */ - readonly excludeBasenames?: readonly string[] - /** Protection mode to apply when used in protect declarations */ - readonly protectionMode?: ProtectionMode - /** Optional scope label for logging/trace */ - readonly scope?: OutputCleanupScope - /** Optional label for diagnostics */ - readonly label?: string -} - -/** - * Optional cleanup declaration set for one output plugin. - */ -export interface OutputCleanupDeclarations { - /** Paths/patterns that should be cleaned */ - readonly delete?: readonly OutputCleanupPathDeclaration[] - /** Paths/patterns that must be protected from cleanup */ - readonly protect?: readonly OutputCleanupPathDeclaration[] - /** Glob ignore patterns when expanding delete/protect globs */ - readonly excludeScanGlobs?: readonly string[] -} - -function isNodeBufferLike(value: unknown): value is Buffer { - return Buffer.isBuffer(value) -} - -function normalizeScopeSelection(selection: OutputScopeSelection): readonly OutputDeclarationScope[] { - if (typeof selection === 'string') return [selection] - - const unique: OutputDeclarationScope[] = [] - for (const scope of selection) { - if (!unique.includes(scope)) unique.push(scope) - } - return unique -} - -function getPluginScopeOverrides( - pluginName: string, - pluginOptions?: PluginOptions -): PluginOutputScopeTopics | undefined { - return pluginOptions?.outputScopes?.plugins?.[pluginName] -} - -export function validateOutputPluginCapabilities(plugin: OutputPlugin): void { - for (const topic of OUTPUT_SCOPE_TOPICS) { - const capability = plugin.outputCapabilities[topic] - if (capability == null) continue - if (capability.scopes.length === 0) throw new Error(`Plugin ${plugin.name} declares empty scopes for topic "${topic}"`) - } -} - -export function validateOutputScopeOverridesForPlugin( - plugin: OutputPlugin, - pluginOptions?: PluginOptions -): void { - const overrides = getPluginScopeOverrides(plugin.name, pluginOptions) - if (overrides == null) return - - for (const topic of OUTPUT_SCOPE_TOPICS) { - const requestedSelection = overrides[topic] - if (requestedSelection == null) continue - - const capability = plugin.outputCapabilities[topic] - if (capability == null) { - throw new Error( - `Invalid outputScopes configuration: outputScopes.plugins.${plugin.name}.${topic} is set, but plugin ${plugin.name} does not support topic "${topic}".` - ) - } - - const requestedScopes = normalizeScopeSelection(requestedSelection) - if (capability.singleScope && requestedScopes.length > 1) { - const requested = requestedScopes.join(', ') - throw new Error( - `Invalid outputScopes configuration: outputScopes.plugins.${plugin.name}.${topic} is single-scope and cannot request multiple scopes [${requested}].` - ) - } - - const allowedScopes = new Set(capability.scopes) - const unsupportedScopes = requestedScopes.filter(scope => !allowedScopes.has(scope)) - - if (unsupportedScopes.length > 0) { - const allowed = capability.scopes.join(', ') - const requested = unsupportedScopes.join(', ') - throw new Error( - `Invalid outputScopes configuration: outputScopes.plugins.${plugin.name}.${topic} requests unsupported scopes [${requested}]. Allowed scopes: [${allowed}].` - ) - } - } -} - -export function validateOutputScopeOverridesForPlugins( - plugins: readonly OutputPlugin[], - pluginOptions?: PluginOptions -): void { - for (const plugin of plugins) { - validateOutputPluginCapabilities(plugin) - validateOutputScopeOverridesForPlugin(plugin, pluginOptions) - } -} - -export async function collectOutputDeclarations( - plugins: readonly OutputPlugin[], - ctx: OutputWriteContext -): Promise> { - validateOutputScopeOverridesForPlugins(plugins, ctx.pluginOptions) - - const declarationEntries = await Promise.all( - plugins.map(async plugin => [plugin, await plugin.declareOutputFiles(ctx)] as const) - ) - - return new Map(declarationEntries) -} - -/** - * Execute declarative write operations for output plugins. - * Core runtime owns file system writes; plugins only declare and convert content. - */ -export async function executeDeclarativeWriteOutputs( - plugins: readonly OutputPlugin[], - ctx: OutputWriteContext, - predeclaredOutputs?: ReadonlyMap -): Promise> { - const results = new Map() - const outputDeclarations = predeclaredOutputs ?? await collectOutputDeclarations(plugins, ctx) - - for (const plugin of plugins) { - const declarations = outputDeclarations.get(plugin) ?? [] - const fileResults: WriteResult[] = [] - - for (const declaration of declarations) { - if (ctx.dryRun === true) { - fileResults.push({path: declaration.path, success: true, skipped: false}) - continue - } - - try { - const parentDir = path.dirname(declaration.path) - fs.mkdirSync(parentDir, {recursive: true}) - - if (declaration.ifExists === 'skip' && fs.existsSync(declaration.path)) { - fileResults.push({path: declaration.path, success: true, skipped: true}) - continue - } - - if (declaration.ifExists === 'error' && fs.existsSync(declaration.path)) throw new Error(`Refusing to overwrite existing file: ${declaration.path}`) - - const content = await plugin.convertContent(declaration, ctx) - isNodeBufferLike(content) - ? fs.writeFileSync(declaration.path, content) - : fs.writeFileSync(declaration.path, content, 'utf8') - fileResults.push({path: declaration.path, success: true}) - } - catch (error) { - fileResults.push({path: declaration.path, success: false, error: error as Error}) - } - } - - const pluginResult: WriteResults = {files: fileResults, dirs: []} - results.set(plugin.name, pluginResult) - } - - return results -} - -/** - * Collected outputs from all plugins. - * Used by the clean command to gather all artifacts for cleanup. - */ -export interface CollectedOutputs { - readonly projectDirs: readonly string[] - readonly projectFiles: readonly string[] - readonly globalDirs: readonly string[] - readonly globalFiles: readonly string[] -} - -/** - * Collect all outputs from all registered output plugins. - * This is the main entry point for the clean command. - */ -export async function collectAllPluginOutputs( - plugins: readonly OutputPlugin[], - ctx: OutputPluginContext, - predeclaredOutputs?: ReadonlyMap -): Promise { - const projectDirs: string[] = [] - const projectFiles: string[] = [] - const globalDirs: string[] = [] - const globalFiles: string[] = [] - - const declarationGroups = predeclaredOutputs != null - ? [...predeclaredOutputs.values()] - : Array.from( - await collectOutputDeclarations(plugins, {...ctx, dryRun: true}), - ([, declarations]) => declarations - ) - - for (const declarations of declarationGroups) { - for (const declaration of declarations) { - if (declaration.scope === 'global') globalFiles.push(declaration.path) - else projectFiles.push(declaration.path) - } - } - - return { - projectDirs, - projectFiles, - globalDirs, - globalFiles - } -} - -/** - * Configuration to be processed by plugin.config.ts - * Interpreted by plugin system as collection context - * Path placeholder `~` resolves to the user home directory. - * - * @see InputCollectedContext - Input-side collected context - * @see OutputCollectedContext - Output-side collected context - */ -export interface PluginOptions { - readonly version?: string - - readonly workspaceDir?: string - - readonly aindex?: AindexConfig - - readonly commandSeriesOptions?: CommandSeriesOptions - - readonly outputScopes?: OutputScopeOptions - - readonly frontMatter?: FrontMatterOptions - - readonly cleanupProtection?: CleanupProtectionOptions - - readonly windows?: WindowsOptions - - plugins?: readonly (InputCapability | OutputPlugin)[] - logLevel?: 'trace' | 'debug' | 'info' | 'warn' | 'error' -} diff --git a/cli/src/plugins/plugin-core/scopePolicy.test.ts b/cli/src/plugins/plugin-core/scopePolicy.test.ts deleted file mode 100644 index 34b83aa9..00000000 --- a/cli/src/plugins/plugin-core/scopePolicy.test.ts +++ /dev/null @@ -1,50 +0,0 @@ -import {describe, expect, it} from 'vitest' -import {resolveTopicScopes} from './scopePolicy' - -describe('resolveTopicScopes', () => { - it('selects highest available scope for single-scope topics', () => { - const result = resolveTopicScopes({ - defaultScopes: ['project', 'global'], - supportedScopes: ['project', 'global'], - singleScope: true, - availableScopes: ['project', 'global'] - }) - - expect(result).toEqual(['project']) - }) - - it('respects requested scope when provided', () => { - const result = resolveTopicScopes({ - requestedScopes: ['global'], - defaultScopes: ['project', 'global'], - supportedScopes: ['project', 'global'], - singleScope: true, - availableScopes: ['project', 'global'] - }) - - expect(result).toEqual(['global']) - }) - - it('returns prioritized multi-scope list for multi-scope topics', () => { - const result = resolveTopicScopes({ - requestedScopes: ['global', 'project'], - defaultScopes: ['project', 'global'], - supportedScopes: ['project', 'global'], - singleScope: false - }) - - expect(result).toEqual(['project', 'global']) - }) - - it('returns empty when requested scope is unsupported', () => { - const result = resolveTopicScopes({ - requestedScopes: ['project'], - defaultScopes: ['project'], - supportedScopes: ['global'], - singleScope: true, - availableScopes: ['project', 'global'] - }) - - expect(result).toEqual([]) - }) -}) diff --git a/cli/src/plugins/plugin-core/scopePolicy.ts b/cli/src/plugins/plugin-core/scopePolicy.ts deleted file mode 100644 index ca31a6c4..00000000 --- a/cli/src/plugins/plugin-core/scopePolicy.ts +++ /dev/null @@ -1,73 +0,0 @@ -import type {OutputDeclarationScope} from './plugin' - -export const DEFAULT_SCOPE_PRIORITY: readonly OutputDeclarationScope[] = ['project', 'global'] as const - -export type ScopeSelectionInput = OutputDeclarationScope | readonly OutputDeclarationScope[] | undefined - -function normalizeSelection(selection: ScopeSelectionInput): OutputDeclarationScope[] { - if (selection == null) return [] - if (typeof selection === 'string') return [selection] - const unique: OutputDeclarationScope[] = [] - for (const scope of selection) { - if (!unique.includes(scope)) unique.push(scope) - } - return unique -} - -function sortByPriority( - scopes: readonly OutputDeclarationScope[], - priority: readonly OutputDeclarationScope[] -): OutputDeclarationScope[] { - const priorityIndex = new Map() - for (const [index, scope] of priority.entries()) priorityIndex.set(scope, index) - - return [...scopes].sort((a, b) => { - const ia = priorityIndex.get(a) ?? Number.MAX_SAFE_INTEGER - const ib = priorityIndex.get(b) ?? Number.MAX_SAFE_INTEGER - return ia - ib - }) -} - -export interface ResolveTopicScopesOptions { - readonly requestedScopes?: ScopeSelectionInput - readonly defaultScopes: readonly OutputDeclarationScope[] - readonly supportedScopes: readonly OutputDeclarationScope[] - readonly singleScope: boolean - readonly availableScopes?: readonly OutputDeclarationScope[] - readonly priority?: readonly OutputDeclarationScope[] -} - -export function resolveTopicScopes( - options: ResolveTopicScopesOptions -): readonly OutputDeclarationScope[] { - const { - requestedScopes, - defaultScopes, - supportedScopes, - singleScope, - availableScopes, - priority = DEFAULT_SCOPE_PRIORITY - } = options - - const requested = normalizeSelection(requestedScopes) - const defaults = normalizeSelection(defaultScopes) - const supported = new Set(normalizeSelection(supportedScopes)) - - const base = requested.length > 0 ? requested : defaults - const candidates = base.filter(scope => supported.has(scope)) - if (candidates.length === 0) return [] - - const prioritized = sortByPriority(candidates, priority) - - if (!singleScope) return prioritized - - if (availableScopes != null && availableScopes.length > 0) { - const available = new Set(availableScopes) - const matched = prioritized.find(scope => available.has(scope)) - if (matched == null) return [] - return [matched] - } - const [first] = prioritized - if (first == null) return [] - return [first] -} diff --git a/cli/src/plugins/plugin-core/types.ts b/cli/src/plugins/plugin-core/types.ts deleted file mode 100644 index 3a766dfe..00000000 --- a/cli/src/plugins/plugin-core/types.ts +++ /dev/null @@ -1,39 +0,0 @@ -export * from './AindexConfigDefaults' -export * from './AindexTypes' -export * from './ConfigTypes.schema' -export * from './enums' -export * from './ExportMetadataTypes' -export * from './InputTypes' -export * from './OutputTypes' -export * from './plugin' -export * from './PromptTypes' -export type { - DiagnosticLines, - ILogger, - LoggerDiagnosticInput, - LoggerDiagnosticRecord, - LogLevel -} from '@truenine/logger' - -export class MissingDependencyError extends Error { - readonly nodeName: string - - readonly missingDependency: string - - constructor(nodeName: string, missingDependency: string) { - super(`Node "${nodeName}" depends on missing dependency "${missingDependency}"`) - this.name = 'MissingDependencyError' - this.nodeName = nodeName - this.missingDependency = missingDependency - } -} - -export class CircularDependencyError extends Error { - readonly cyclePath: readonly string[] - - constructor(cyclePath: readonly string[]) { - super(`Circular dependency detected: ${cyclePath.join(' -> ')}`) - this.name = 'CircularDependencyError' - this.cyclePath = [...cyclePath] - } -} diff --git a/cli/src/plugins/plugin-cursor.ts b/cli/src/plugins/plugin-cursor.ts deleted file mode 100644 index 4c94c1bb..00000000 --- a/cli/src/plugins/plugin-cursor.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - CursorOutputPlugin -} from './CursorOutputPlugin' diff --git a/cli/src/plugins/plugin-droid-cli.ts b/cli/src/plugins/plugin-droid-cli.ts deleted file mode 100644 index 040d09e7..00000000 --- a/cli/src/plugins/plugin-droid-cli.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - DroidCLIOutputPlugin -} from './DroidCLIOutputPlugin' diff --git a/cli/src/plugins/plugin-editorconfig.ts b/cli/src/plugins/plugin-editorconfig.ts deleted file mode 100644 index 189999e5..00000000 --- a/cli/src/plugins/plugin-editorconfig.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - EditorConfigOutputPlugin -} from './EditorConfigOutputPlugin' diff --git a/cli/src/plugins/plugin-gemini-cli.ts b/cli/src/plugins/plugin-gemini-cli.ts deleted file mode 100644 index 4a330a0d..00000000 --- a/cli/src/plugins/plugin-gemini-cli.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - GeminiCLIOutputPlugin -} from './GeminiCLIOutputPlugin' diff --git a/cli/src/plugins/plugin-git-exclude.ts b/cli/src/plugins/plugin-git-exclude.ts deleted file mode 100644 index b4de77a1..00000000 --- a/cli/src/plugins/plugin-git-exclude.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - GitExcludeOutputPlugin -} from './GitExcludeOutputPlugin' diff --git a/cli/src/plugins/plugin-jetbrains-ai-codex.ts b/cli/src/plugins/plugin-jetbrains-ai-codex.ts deleted file mode 100644 index 0a3c6461..00000000 --- a/cli/src/plugins/plugin-jetbrains-ai-codex.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - JetBrainsAIAssistantCodexOutputPlugin -} from './JetBrainsAIAssistantCodexOutputPlugin' diff --git a/cli/src/plugins/plugin-jetbrains-codestyle.ts b/cli/src/plugins/plugin-jetbrains-codestyle.ts deleted file mode 100644 index 768102b3..00000000 --- a/cli/src/plugins/plugin-jetbrains-codestyle.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - JetBrainsIDECodeStyleConfigOutputPlugin -} from './JetBrainsIDECodeStyleConfigOutputPlugin' diff --git a/cli/src/plugins/plugin-openai-codex-cli.ts b/cli/src/plugins/plugin-openai-codex-cli.ts deleted file mode 100644 index f1affd58..00000000 --- a/cli/src/plugins/plugin-openai-codex-cli.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - CodexCLIOutputPlugin -} from './CodexCLIOutputPlugin' diff --git a/cli/src/plugins/plugin-opencode-cli.ts b/cli/src/plugins/plugin-opencode-cli.ts deleted file mode 100644 index 7ce39288..00000000 --- a/cli/src/plugins/plugin-opencode-cli.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - OpencodeCLIOutputPlugin -} from './OpencodeCLIOutputPlugin' diff --git a/cli/src/plugins/plugin-qoder-ide.ts b/cli/src/plugins/plugin-qoder-ide.ts deleted file mode 100644 index 4573a43c..00000000 --- a/cli/src/plugins/plugin-qoder-ide.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - QoderIDEPluginOutputPlugin -} from './QoderIDEPluginOutputPlugin' diff --git a/cli/src/plugins/plugin-readme.ts b/cli/src/plugins/plugin-readme.ts deleted file mode 100644 index e299d8c0..00000000 --- a/cli/src/plugins/plugin-readme.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - ReadmeMdConfigFileOutputPlugin -} from './ReadmeMdConfigFileOutputPlugin' diff --git a/cli/src/plugins/plugin-trae-cn-ide.ts b/cli/src/plugins/plugin-trae-cn-ide.ts deleted file mode 100644 index c064a45f..00000000 --- a/cli/src/plugins/plugin-trae-cn-ide.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - TraeCNIDEOutputPlugin -} from './TraeCNIDEOutputPlugin' diff --git a/cli/src/plugins/plugin-trae-ide.ts b/cli/src/plugins/plugin-trae-ide.ts deleted file mode 100644 index d194f82b..00000000 --- a/cli/src/plugins/plugin-trae-ide.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - TraeIDEOutputPlugin -} from './TraeIDEOutputPlugin' diff --git a/cli/src/plugins/plugin-vscode.ts b/cli/src/plugins/plugin-vscode.ts deleted file mode 100644 index c8848542..00000000 --- a/cli/src/plugins/plugin-vscode.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - VisualStudioCodeIDEConfigOutputPlugin -} from './VisualStudioCodeIDEConfigOutputPlugin' diff --git a/cli/src/plugins/plugin-warp-ide.ts b/cli/src/plugins/plugin-warp-ide.ts deleted file mode 100644 index b9e1bf10..00000000 --- a/cli/src/plugins/plugin-warp-ide.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - WarpIDEOutputPlugin -} from './WarpIDEOutputPlugin' diff --git a/cli/src/plugins/plugin-windsurf.ts b/cli/src/plugins/plugin-windsurf.ts deleted file mode 100644 index e749bd3d..00000000 --- a/cli/src/plugins/plugin-windsurf.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - WindsurfOutputPlugin -} from './WindsurfOutputPlugin' diff --git a/cli/src/plugins/plugin-zed.ts b/cli/src/plugins/plugin-zed.ts deleted file mode 100644 index 85ad398b..00000000 --- a/cli/src/plugins/plugin-zed.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - ZedIDEConfigOutputPlugin -} from './ZedIDEConfigOutputPlugin' diff --git a/cli/src/prompts.test.ts b/cli/src/prompts.test.ts deleted file mode 100644 index add5c693..00000000 --- a/cli/src/prompts.test.ts +++ /dev/null @@ -1,367 +0,0 @@ -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import {afterEach, describe, expect, it} from 'vitest' -import { - getPrompt, - listPrompts, - resolvePromptDefinition, - upsertPromptSource, - writePromptArtifacts -} from './prompts' - -const tempDirs: string[] = [] - -function createTempWorkspace(prefix: string): string { - const dir = fs.mkdtempSync(path.join(os.tmpdir(), prefix)) - tempDirs.push(dir) - return dir -} - -function writeFile(filePath: string, content: string, modifiedAt: Date): void { - fs.mkdirSync(path.dirname(filePath), {recursive: true}) - fs.writeFileSync(filePath, content, 'utf8') - fs.utimesSync(filePath, modifiedAt, modifiedAt) -} - -function serviceOptions(workspaceDir: string) { - return { - loadUserConfig: false, - pluginOptions: { - workspaceDir - } - } as const -} - -afterEach(() => { - for (const dir of tempDirs.splice(0)) fs.rmSync(dir, {recursive: true, force: true}) -}) - -describe('prompt catalog service', () => { - it('lists every managed prompt family with status metadata', async () => { - const workspaceDir = createTempWorkspace('tnmsc-prompts-') - const aindexDir = path.join(workspaceDir, 'aindex') - const now = Date.now() - - writeFile( - path.join(aindexDir, 'global.src.mdx'), - '---\ndescription: global zh\n---\nGlobal zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'global.mdx'), - '---\ndescription: global en\n---\nGlobal en', - new Date(now - 10_000) - ) - writeFile( - path.join(aindexDir, 'dist', 'global.mdx'), - '---\ndescription: global dist\n---\nGlobal dist', - new Date(now - 10_000) - ) - - writeFile( - path.join(aindexDir, 'workspace.src.mdx'), - '---\ndescription: workspace zh\n---\nWorkspace zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'workspace.mdx'), - '---\ndescription: workspace en\n---\nWorkspace en', - new Date(now + 1_000) - ) - writeFile( - path.join(aindexDir, 'dist', 'workspace.mdx'), - '---\ndescription: workspace dist\n---\nWorkspace dist', - new Date(now + 1_000) - ) - - writeFile( - path.join(aindexDir, 'app', 'project-a', 'agt.src.mdx'), - '---\ndescription: project zh\n---\nProject zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'app', 'project-a', 'agt.mdx'), - '---\ndescription: project en\n---\nProject en', - new Date(now + 1_000) - ) - writeFile( - path.join(aindexDir, 'dist', 'app', 'project-a', 'agt.mdx'), - '---\ndescription: project dist\n---\nProject dist', - new Date(now + 1_000) - ) - - writeFile( - path.join(aindexDir, 'app', 'project-b', 'docs', 'agt.mdx'), - '---\ndescription: child legacy zh\n---\nChild legacy zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'dist', 'app', 'project-b', 'docs', 'agt.mdx'), - '---\ndescription: child dist\n---\nChild dist', - new Date(now + 1_000) - ) - - writeFile( - path.join(aindexDir, 'ext', 'project-a', 'agt.src.mdx'), - '---\ndescription: ext project zh\n---\nExt project zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'dist', 'ext', 'project-a', 'agt.mdx'), - '---\ndescription: ext project dist\n---\nExt project dist', - new Date(now + 1_000) - ) - - writeFile( - path.join(aindexDir, 'arch', 'system-a', 'agt.src.mdx'), - '---\ndescription: arch project zh\n---\nArch project zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'dist', 'arch', 'system-a', 'agt.mdx'), - '---\ndescription: arch project dist\n---\nArch project dist', - new Date(now + 1_000) - ) - - writeFile( - path.join(aindexDir, 'softwares', 'tool-a', 'agt.src.mdx'), - '---\ndescription: software project zh\n---\nSoftware project zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'dist', 'softwares', 'tool-a', 'agt.mdx'), - '---\ndescription: software project dist\n---\nSoftware project dist', - new Date(now + 1_000) - ) - - writeFile( - path.join(aindexDir, 'skills', 'reviewer', 'skill.src.mdx'), - '---\ndescription: skill zh\n---\nSkill zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'skills', 'reviewer', 'skill.mdx'), - '---\ndescription: skill en\n---\nSkill en', - new Date(now + 1_000) - ) - writeFile( - path.join(aindexDir, 'skills', 'reviewer', 'guide.src.mdx'), - '---\ndescription: guide zh\n---\nGuide zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'dist', 'skills', 'reviewer', 'skill.mdx'), - '---\ndescription: skill dist\n---\nSkill dist', - new Date(now + 1_000) - ) - writeFile( - path.join(aindexDir, 'dist', 'skills', 'reviewer', 'guide.mdx'), - '---\ndescription: guide dist\n---\nGuide dist', - new Date(now + 1_000) - ) - - writeFile( - path.join(aindexDir, 'commands', 'dev', 'build.src.mdx'), - '---\ndescription: command zh\n---\nCommand zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'dist', 'commands', 'dev', 'build.mdx'), - '---\ndescription: command dist\n---\nCommand dist', - new Date(now + 1_000) - ) - - writeFile( - path.join(aindexDir, 'subagents', 'qa', 'boot.src.mdx'), - '---\nname: boot\ndescription: subagent zh\n---\nSubagent zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'subagents', 'qa', 'boot.mdx'), - '---\nname: boot\ndescription: subagent en\n---\nSubagent en', - new Date(now + 1_000) - ) - writeFile( - path.join(aindexDir, 'dist', 'subagents', 'qa', 'boot.mdx'), - '---\nname: boot\ndescription: subagent dist\n---\nSubagent dist', - new Date(now + 1_000) - ) - - writeFile( - path.join(aindexDir, 'rules', 'frontend.src.mdx'), - '---\ndescription: rule zh\nglobs: ["src/**"]\n---\nRule zh', - new Date(now) - ) - writeFile( - path.join(aindexDir, 'dist', 'rules', 'frontend.mdx'), - '---\ndescription: rule dist\nglobs: ["src/**"]\n---\nRule dist', - new Date(now + 1_000) - ) - - const prompts = await listPrompts(serviceOptions(workspaceDir)) - - expect(prompts.map(prompt => prompt.promptId)).toEqual([ - 'command:dev/build', - 'global-memory', - 'project-child-memory:app/project-b/docs', - 'project-memory:app/project-a', - 'project-memory:arch/system-a', - 'project-memory:ext/project-a', - 'project-memory:softwares/tool-a', - 'rule:frontend', - 'skill-child-doc:reviewer/guide', - 'skill:reviewer', - 'subagent:qa/boot', - 'workspace-memory' - ]) - expect(prompts.find(prompt => prompt.promptId === 'global-memory')).toEqual(expect.objectContaining({enStatus: 'stale', distStatus: 'stale'})) - expect(prompts.find(prompt => prompt.promptId === 'workspace-memory')).toEqual(expect.objectContaining({enStatus: 'ready', distStatus: 'ready'})) - expect(prompts.find(prompt => prompt.promptId === 'project-child-memory:app/project-b/docs')).toEqual(expect.objectContaining({ - legacyZhSource: true, - enStatus: 'missing', - distStatus: 'ready' - })) - expect(prompts.find(prompt => prompt.promptId === 'project-memory:ext/project-a')).toEqual(expect.objectContaining({ - logicalName: 'ext/project-a', - distStatus: 'ready' - })) - expect(prompts.find(prompt => prompt.promptId === 'command:dev/build')).toEqual(expect.objectContaining({enStatus: 'missing', distStatus: 'ready'})) - - const filtered = await listPrompts({ - ...serviceOptions(workspaceDir), - kinds: ['project-memory'], - distStatus: ['ready'] - }) - - expect(filtered.map(prompt => prompt.promptId)).toEqual([ - 'project-memory:app/project-a', - 'project-memory:arch/system-a', - 'project-memory:ext/project-a', - 'project-memory:softwares/tool-a' - ]) - }) - - it('returns prompt contents and expected paths', async () => { - const workspaceDir = createTempWorkspace('tnmsc-prompt-details-') - const aindexDir = path.join(workspaceDir, 'aindex') - const modifiedAt = new Date() - - writeFile( - path.join(aindexDir, 'skills', 'reviewer', 'skill.src.mdx'), - '---\ndescription: skill zh\n---\nSkill zh', - modifiedAt - ) - writeFile( - path.join(aindexDir, 'skills', 'reviewer', 'skill.mdx'), - '---\ndescription: skill en\n---\nSkill en', - modifiedAt - ) - writeFile( - path.join(aindexDir, 'dist', 'skills', 'reviewer', 'skill.mdx'), - '---\ndescription: skill dist\n---\nSkill dist', - modifiedAt - ) - - const prompt = await getPrompt('skill:reviewer', serviceOptions(workspaceDir)) - const resolvedPaths = await resolvePromptDefinition('skill:reviewer', serviceOptions(workspaceDir)) - - expect(prompt).toEqual(expect.objectContaining({ - promptId: 'skill:reviewer', - frontMatter: expect.objectContaining({description: 'skill zh'}) - })) - expect(prompt?.src.zh?.content).toContain('Skill zh') - expect(prompt?.src.en?.content).toContain('Skill en') - expect(prompt?.dist?.content).toContain('Skill dist') - expect(resolvedPaths).toEqual(prompt?.paths) - }) - - it('migrates legacy project memory to the new zh/en source convention', async () => { - const workspaceDir = createTempWorkspace('tnmsc-project-migration-') - const aindexDir = path.join(workspaceDir, 'aindex') - const legacyPath = path.join(aindexDir, 'app', 'project-c', 'agt.mdx') - - writeFile( - legacyPath, - '---\ndescription: legacy zh\n---\nLegacy zh', - new Date() - ) - - const migrated = await upsertPromptSource({ - ...serviceOptions(workspaceDir), - promptId: 'project-memory:project-c', - locale: 'en', - content: '---\ndescription: translated en\n---\nTranslated en' - }) - - expect(fs.readFileSync(path.join(aindexDir, 'app', 'project-c', 'agt.src.mdx'), 'utf8')).toContain('Legacy zh') - expect(fs.readFileSync(legacyPath, 'utf8')).toContain('Translated en') - expect(migrated.promptId).toBe('project-memory:app/project-c') - expect(migrated.src.zh?.legacySource).toBeUndefined() - expect(migrated.src.en?.content).toContain('Translated en') - - const rewritten = await upsertPromptSource({ - ...serviceOptions(workspaceDir), - promptId: 'project-memory:project-c', - locale: 'zh', - content: '---\ndescription: rewritten zh\n---\nRewritten zh' - }) - - expect(fs.readFileSync(path.join(aindexDir, 'app', 'project-c', 'agt.src.mdx'), 'utf8')).toContain('Rewritten zh') - expect(fs.existsSync(legacyPath)).toBe(false) - expect(rewritten.exists.en).toBe(false) - }) - - it('accepts legacy app project IDs while resolving to series-aware paths', async () => { - const workspaceDir = createTempWorkspace('tnmsc-project-legacy-id-') - const aindexDir = path.join(workspaceDir, 'aindex') - const modifiedAt = new Date() - - writeFile( - path.join(aindexDir, 'app', 'project-a', 'agt.src.mdx'), - '---\ndescription: project zh\n---\nProject zh', - modifiedAt - ) - writeFile( - path.join(aindexDir, 'dist', 'app', 'project-a', 'agt.mdx'), - '---\ndescription: project dist\n---\nProject dist', - modifiedAt - ) - - const prompt = await getPrompt('project-memory:project-a', serviceOptions(workspaceDir)) - const resolvedPaths = await resolvePromptDefinition('project-memory:project-a', serviceOptions(workspaceDir)) - - expect(prompt?.promptId).toBe('project-memory:app/project-a') - expect(resolvedPaths.zh).toBe(path.join(aindexDir, 'app', 'project-a', 'agt.src.mdx')) - expect(resolvedPaths.dist).toBe(path.join(aindexDir, 'dist', 'app', 'project-a', 'agt.mdx')) - }) - - it('writes translation artifacts independently for en and dist', async () => { - const workspaceDir = createTempWorkspace('tnmsc-translation-write-') - const aindexDir = path.join(workspaceDir, 'aindex') - - writeFile( - path.join(aindexDir, 'commands', 'dev', 'ship.src.mdx'), - '---\ndescription: ship zh\n---\nShip zh', - new Date() - ) - - const afterEnWrite = await writePromptArtifacts({ - ...serviceOptions(workspaceDir), - promptId: 'command:dev/ship', - enContent: '---\ndescription: ship en\n---\nShip en' - }) - - expect(afterEnWrite.src.en?.content).toContain('Ship en') - expect(afterEnWrite.distStatus).toBe('missing') - - const afterDistWrite = await writePromptArtifacts({ - ...serviceOptions(workspaceDir), - promptId: 'command:dev/ship', - distContent: '---\ndescription: ship dist\n---\nShip dist' - }) - - expect(afterDistWrite.dist?.content).toContain('Ship dist') - expect(afterDistWrite.distStatus).toBe('ready') - }) -}) diff --git a/cli/src/prompts.ts b/cli/src/prompts.ts deleted file mode 100644 index b04dd9b4..00000000 --- a/cli/src/prompts.ts +++ /dev/null @@ -1,804 +0,0 @@ -import type {AindexProjectSeriesName, PluginOptions, YAMLFrontMatter} from '@/plugins/plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {parseMarkdown} from '@truenine/md-compiler/markdown' -import glob from 'fast-glob' -import { - isAindexProjectSeriesName, - resolveAindexProjectSeriesConfig, - resolveAindexProjectSeriesConfigs -} from '@/aindex-project-series' -import {mergeConfig, userConfigToPluginOptions} from './config' -import {getConfigLoader} from './ConfigLoader' -import {PathPlaceholders} from './plugins/plugin-core' -import {resolveUserPath} from './runtime-environment' - -export type ManagedPromptKind - = | 'global-memory' - | 'workspace-memory' - | 'project-memory' - | 'project-child-memory' - | 'skill' - | 'skill-child-doc' - | 'command' - | 'subagent' - | 'rule' - -export type PromptArtifactState = 'missing' | 'stale' | 'ready' -export type PromptSourceLocale = 'zh' | 'en' - -export interface PromptServiceOptions { - readonly cwd?: string - readonly loadUserConfig?: boolean - readonly pluginOptions?: Partial -} - -export interface ListPromptsOptions extends PromptServiceOptions { - readonly kinds?: readonly ManagedPromptKind[] - readonly query?: string - readonly enStatus?: readonly PromptArtifactState[] - readonly distStatus?: readonly PromptArtifactState[] -} - -export interface PromptArtifactRecord { - readonly path: string - readonly exists: true - readonly mtime: string - readonly mtimeMs: number - readonly size: number - readonly legacySource?: true - readonly frontMatter?: YAMLFrontMatter - readonly content?: string -} - -export interface PromptCatalogPaths { - readonly zh: string - readonly en: string - readonly dist: string -} - -export interface PromptCatalogPresence { - readonly zh: boolean - readonly en: boolean - readonly dist: boolean -} - -export interface PromptCatalogItem { - readonly promptId: string - readonly kind: ManagedPromptKind - readonly logicalName: string - readonly paths: PromptCatalogPaths - readonly exists: PromptCatalogPresence - readonly enStatus: PromptArtifactState - readonly distStatus: PromptArtifactState - readonly updatedAt?: string - readonly legacyZhSource?: true -} - -export interface PromptDetails extends PromptCatalogItem { - readonly src: { - readonly zh?: PromptArtifactRecord - readonly en?: PromptArtifactRecord - } - readonly dist?: PromptArtifactRecord - readonly frontMatter?: YAMLFrontMatter -} - -export interface UpsertPromptSourceInput extends PromptServiceOptions { - readonly promptId: string - readonly locale?: PromptSourceLocale - readonly content: string -} - -export interface WritePromptArtifactsInput extends PromptServiceOptions { - readonly promptId: string - readonly enContent?: string - readonly distContent?: string -} - -interface ResolvedPromptEnvironment { - readonly options: Required - readonly workspaceDir: string - readonly aindexDir: string -} - -interface PromptDefinition { - readonly promptId: string - readonly kind: ManagedPromptKind - readonly logicalName: string - readonly paths: PromptCatalogPaths - readonly legacyZhPath?: string -} - -interface PromptIdDescriptor { - readonly kind: ManagedPromptKind - readonly seriesName?: AindexProjectSeriesName - readonly projectName?: string - readonly relativeName?: string - readonly skillName?: string -} - -const SOURCE_PROMPT_EXTENSION = '.src.mdx' -const MDX_EXTENSION = '.mdx' -const PROJECT_MEMORY_FILE_NAME = 'agt' -const SKILL_ENTRY_FILE_NAME = 'skill' -const LEGACY_PROJECT_MEMORY_KINDS = new Set([ - 'project-memory', - 'project-child-memory' -]) - -function normalizeSlashPath(value: string): string { - return value.replaceAll('\\', '/') -} - -function normalizeRelativeIdentifier(value: string, fieldName: string): string { - const normalized = normalizeSlashPath(value).trim() - if (normalized.length === 0) throw new Error(`${fieldName} cannot be empty`) - - const segments = normalized.split('/') - for (const segment of segments) { - if (segment.length === 0 || segment === '.' || segment === '..') throw new Error(`${fieldName} contains an invalid path segment`) - } - - return segments.join('/') -} - -function isSingleSegmentIdentifier(value: string): boolean { - return !normalizeSlashPath(value).includes('/') -} - -function resolveConfiguredPath(rawPath: string, workspaceDir: string): string { - let resolved = rawPath - - if (resolved.includes(PathPlaceholders.WORKSPACE)) resolved = resolved.replace(PathPlaceholders.WORKSPACE, workspaceDir) - - return resolveUserPath(resolved) -} - -function resolvePromptEnvironment(options: PromptServiceOptions = {}): ResolvedPromptEnvironment { - const {cwd, loadUserConfig = true, pluginOptions = {}} = options - let userConfigOptions: Partial = {} - - if (loadUserConfig) { - const userConfigResult = getConfigLoader().load(cwd) - if (userConfigResult.found) userConfigOptions = userConfigToPluginOptions(userConfigResult.config) - } - - const mergedOptions = mergeConfig(userConfigOptions, pluginOptions) - const workspaceDir = resolveConfiguredPath(mergedOptions.workspaceDir, '') - const aindexDir = path.join(workspaceDir, mergedOptions.aindex.dir) - - return { - options: mergedOptions, - workspaceDir, - aindexDir - } -} - -function deriveEnglishSourcePath(zhPath: string): string { - if (zhPath.endsWith(SOURCE_PROMPT_EXTENSION)) return `${zhPath.slice(0, -SOURCE_PROMPT_EXTENSION.length)}${MDX_EXTENSION}` - - const ext = path.extname(zhPath) - if (ext === MDX_EXTENSION) return zhPath - return `${zhPath}${MDX_EXTENSION}` -} - -function stripPromptExtension(filePath: string): string { - if (filePath.endsWith(SOURCE_PROMPT_EXTENSION)) return filePath.slice(0, -SOURCE_PROMPT_EXTENSION.length) - - if (filePath.endsWith(MDX_EXTENSION)) return filePath.slice(0, -MDX_EXTENSION.length) - - return filePath -} - -function listFiles(cwd: string, patterns: readonly string[]): string[] { - if (!(fs.existsSync(cwd) && fs.statSync(cwd).isDirectory())) return [] - - return glob.sync([...patterns], { - cwd, - dot: true, - onlyFiles: true - }).map(normalizeSlashPath) -} - -function buildGlobalMemoryDefinition(env: ResolvedPromptEnvironment): PromptDefinition { - const zhPath = path.join(env.aindexDir, env.options.aindex.globalPrompt.src) - - return { - promptId: 'global-memory', - kind: 'global-memory', - logicalName: 'global-memory', - paths: { - zh: zhPath, - en: deriveEnglishSourcePath(zhPath), - dist: path.join(env.aindexDir, env.options.aindex.globalPrompt.dist) - } - } -} - -function buildWorkspaceMemoryDefinition(env: ResolvedPromptEnvironment): PromptDefinition { - const zhPath = path.join(env.aindexDir, env.options.aindex.workspacePrompt.src) - - return { - promptId: 'workspace-memory', - kind: 'workspace-memory', - logicalName: 'workspace-memory', - paths: { - zh: zhPath, - en: deriveEnglishSourcePath(zhPath), - dist: path.join(env.aindexDir, env.options.aindex.workspacePrompt.dist) - } - } -} - -function buildProjectMemoryDefinition( - env: ResolvedPromptEnvironment, - seriesName: AindexProjectSeriesName, - projectName: string, - relativeName?: string -): PromptDefinition { - const normalizedProjectName = normalizeRelativeIdentifier(projectName, 'projectName') - if (!isSingleSegmentIdentifier(normalizedProjectName)) throw new Error('projectName must be a single path segment') - - const normalizedRelativeName = relativeName == null - ? '' - : normalizeRelativeIdentifier(relativeName, 'relativeName') - const seriesConfig = resolveAindexProjectSeriesConfig(env.options, seriesName) - const sourceDir = normalizedRelativeName.length === 0 - ? path.join(env.aindexDir, seriesConfig.src, normalizedProjectName) - : path.join(env.aindexDir, seriesConfig.src, normalizedProjectName, normalizedRelativeName) - const distDir = normalizedRelativeName.length === 0 - ? path.join(env.aindexDir, seriesConfig.dist, normalizedProjectName) - : path.join(env.aindexDir, seriesConfig.dist, normalizedProjectName, normalizedRelativeName) - const legacyPath = path.join(sourceDir, `${PROJECT_MEMORY_FILE_NAME}${MDX_EXTENSION}`) - const logicalSuffix = normalizedRelativeName.length === 0 - ? `${seriesName}/${normalizedProjectName}` - : `${seriesName}/${normalizedProjectName}/${normalizedRelativeName}` - - return { - promptId: normalizedRelativeName.length === 0 - ? `project-memory:${logicalSuffix}` - : `project-child-memory:${logicalSuffix}`, - kind: normalizedRelativeName.length === 0 ? 'project-memory' : 'project-child-memory', - logicalName: logicalSuffix, - paths: { - zh: path.join(sourceDir, `${PROJECT_MEMORY_FILE_NAME}${SOURCE_PROMPT_EXTENSION}`), - en: legacyPath, - dist: path.join(distDir, `${PROJECT_MEMORY_FILE_NAME}${MDX_EXTENSION}`) - }, - legacyZhPath: legacyPath - } -} - -function buildSkillDefinition( - env: ResolvedPromptEnvironment, - skillName: string -): PromptDefinition { - const normalizedSkillName = normalizeRelativeIdentifier(skillName, 'skillName') - if (!isSingleSegmentIdentifier(normalizedSkillName)) throw new Error('skillName must be a single path segment') - - const sourceDir = path.join(env.aindexDir, env.options.aindex.skills.src, normalizedSkillName) - const distDir = path.join(env.aindexDir, env.options.aindex.skills.dist, normalizedSkillName) - - return { - promptId: `skill:${normalizedSkillName}`, - kind: 'skill', - logicalName: normalizedSkillName, - paths: { - zh: path.join(sourceDir, `${SKILL_ENTRY_FILE_NAME}${SOURCE_PROMPT_EXTENSION}`), - en: path.join(sourceDir, `${SKILL_ENTRY_FILE_NAME}${MDX_EXTENSION}`), - dist: path.join(distDir, `${SKILL_ENTRY_FILE_NAME}${MDX_EXTENSION}`) - } - } -} - -function buildSkillChildDocDefinition( - env: ResolvedPromptEnvironment, - skillName: string, - relativeName: string -): PromptDefinition { - const normalizedSkillName = normalizeRelativeIdentifier(skillName, 'skillName') - const normalizedRelativeName = normalizeRelativeIdentifier(relativeName, 'relativeName') - if (!isSingleSegmentIdentifier(normalizedSkillName)) throw new Error('skillName must be a single path segment') - - const sourceDir = path.join(env.aindexDir, env.options.aindex.skills.src, normalizedSkillName) - const distDir = path.join(env.aindexDir, env.options.aindex.skills.dist, normalizedSkillName) - - return { - promptId: `skill-child-doc:${normalizedSkillName}/${normalizedRelativeName}`, - kind: 'skill-child-doc', - logicalName: `${normalizedSkillName}/${normalizedRelativeName}`, - paths: { - zh: path.join(sourceDir, `${normalizedRelativeName}${SOURCE_PROMPT_EXTENSION}`), - en: path.join(sourceDir, `${normalizedRelativeName}${MDX_EXTENSION}`), - dist: path.join(distDir, `${normalizedRelativeName}${MDX_EXTENSION}`) - } - } -} - -function buildFlatPromptDefinition( - env: ResolvedPromptEnvironment, - kind: Extract, - relativeName: string -): PromptDefinition { - const normalizedRelativeName = normalizeRelativeIdentifier(relativeName, 'relativeName') - const sourceDir = kind === 'command' - ? path.join(env.aindexDir, env.options.aindex.commands.src) - : kind === 'subagent' - ? path.join(env.aindexDir, env.options.aindex.subAgents.src) - : path.join(env.aindexDir, env.options.aindex.rules.src) - const distDir = kind === 'command' - ? path.join(env.aindexDir, env.options.aindex.commands.dist) - : kind === 'subagent' - ? path.join(env.aindexDir, env.options.aindex.subAgents.dist) - : path.join(env.aindexDir, env.options.aindex.rules.dist) - - return { - promptId: `${kind}:${normalizedRelativeName}`, - kind, - logicalName: normalizedRelativeName, - paths: { - zh: path.join(sourceDir, `${normalizedRelativeName}${SOURCE_PROMPT_EXTENSION}`), - en: path.join(sourceDir, `${normalizedRelativeName}${MDX_EXTENSION}`), - dist: path.join(distDir, `${normalizedRelativeName}${MDX_EXTENSION}`) - } - } -} - -function parsePromptId(promptId: string): PromptIdDescriptor { - switch (promptId) { - case 'global-memory': return {kind: 'global-memory'} - case 'workspace-memory': return {kind: 'workspace-memory'} - default: break - } - - const separatorIndex = promptId.indexOf(':') - if (separatorIndex === -1) throw new Error(`Unsupported promptId: ${promptId}`) - - const kind = promptId.slice(0, separatorIndex) as ManagedPromptKind - const rawValue = promptId.slice(separatorIndex + 1) - const normalizedValue = normalizeRelativeIdentifier(rawValue, 'promptId') - - switch (kind) { - case 'project-memory': - return parseProjectPromptDescriptor(kind, normalizedValue) - case 'project-child-memory': { - return parseProjectPromptDescriptor(kind, normalizedValue) - } - case 'skill': - if (!isSingleSegmentIdentifier(normalizedValue)) throw new Error('skill promptId must include a single skill name') - return {kind, skillName: normalizedValue} - case 'skill-child-doc': { - const [skillName, ...rest] = normalizedValue.split('/') - const relativeName = rest.join('/') - if (skillName == null || relativeName.length === 0) throw new Error('skill-child-doc promptId must include skill and child path') - return {kind, skillName, relativeName} - } - case 'command': - case 'subagent': - case 'rule': return {kind, relativeName: normalizedValue} - default: throw new Error(`Unsupported promptId: ${promptId}`) - } -} - -function parseProjectPromptDescriptor( - kind: Extract, - normalizedValue: string -): PromptIdDescriptor { - const segments = normalizedValue.split('/') - const maybeSeriesName = segments[0] - const hasSeriesName = maybeSeriesName != null && isAindexProjectSeriesName(maybeSeriesName) - - if (kind === 'project-memory') { - if (hasSeriesName) { - const projectName = segments[1] - if (projectName == null || segments.length !== 2) throw new Error('project-memory promptId must include exactly one project name after the series') - return {kind, seriesName: maybeSeriesName, projectName} - } - - if (!isSingleSegmentIdentifier(normalizedValue)) throw new Error('project-memory promptId must include a single project name') - return {kind, seriesName: 'app', projectName: normalizedValue} - } - - if (hasSeriesName) { - const projectName = segments[1] - const relativeName = segments.slice(2).join('/') - if (projectName == null || relativeName.length === 0) throw new Error('project-child-memory promptId must include series, project, and child path') - return {kind, seriesName: maybeSeriesName, projectName, relativeName} - } - - const [projectName, ...rest] = segments - const relativeName = rest.join('/') - if (projectName == null || relativeName.length === 0) throw new Error('project-child-memory promptId must include project and child path') - return {kind, seriesName: 'app', projectName, relativeName} -} - -function buildPromptDefinitionFromId( - promptId: string, - env: ResolvedPromptEnvironment -): PromptDefinition { - const descriptor = parsePromptId(promptId) - - switch (descriptor.kind) { - case 'global-memory': return buildGlobalMemoryDefinition(env) - case 'workspace-memory': return buildWorkspaceMemoryDefinition(env) - case 'project-memory': - if (descriptor.projectName == null) throw new Error('project-memory promptId must include a project name') - return buildProjectMemoryDefinition(env, descriptor.seriesName ?? 'app', descriptor.projectName) - case 'project-child-memory': - if (descriptor.projectName == null || descriptor.relativeName == null) { - throw new Error('project-child-memory promptId must include project and child path') - } - return buildProjectMemoryDefinition(env, descriptor.seriesName ?? 'app', descriptor.projectName, descriptor.relativeName) - case 'skill': - if (descriptor.skillName == null) throw new Error('skill promptId must include a skill name') - return buildSkillDefinition(env, descriptor.skillName) - case 'skill-child-doc': - if (descriptor.skillName == null || descriptor.relativeName == null) { - throw new Error('skill-child-doc promptId must include skill and child path') - } - return buildSkillChildDocDefinition(env, descriptor.skillName, descriptor.relativeName) - case 'command': - case 'subagent': - case 'rule': - if (descriptor.relativeName == null) throw new Error(`${descriptor.kind} promptId must include a relative path`) - return buildFlatPromptDefinition(env, descriptor.kind, descriptor.relativeName) - } -} - -function collectFlatPromptIds( - env: ResolvedPromptEnvironment, - kind: Extract -): string[] { - const sourceDir = kind === 'command' - ? path.join(env.aindexDir, env.options.aindex.commands.src) - : kind === 'subagent' - ? path.join(env.aindexDir, env.options.aindex.subAgents.src) - : path.join(env.aindexDir, env.options.aindex.rules.src) - const distDir = kind === 'command' - ? path.join(env.aindexDir, env.options.aindex.commands.dist) - : kind === 'subagent' - ? path.join(env.aindexDir, env.options.aindex.subAgents.dist) - : path.join(env.aindexDir, env.options.aindex.rules.dist) - const names = new Set() - - for (const match of listFiles(sourceDir, [`**/*${SOURCE_PROMPT_EXTENSION}`, `**/*${MDX_EXTENSION}`])) names.add(stripPromptExtension(match)) - - for (const match of listFiles(distDir, [`**/*${MDX_EXTENSION}`])) names.add(stripPromptExtension(match)) - - return [...names].sort().map(name => `${kind}:${name}`) -} - -function collectSkillPromptIds(env: ResolvedPromptEnvironment): string[] { - const sourceRoot = path.join(env.aindexDir, env.options.aindex.skills.src) - const distRoot = path.join(env.aindexDir, env.options.aindex.skills.dist) - const skillNames = new Set() - - if (fs.existsSync(sourceRoot) && fs.statSync(sourceRoot).isDirectory()) { - for (const entry of fs.readdirSync(sourceRoot, {withFileTypes: true})) { - if (entry.isDirectory()) skillNames.add(entry.name) - } - } - - if (fs.existsSync(distRoot) && fs.statSync(distRoot).isDirectory()) { - for (const entry of fs.readdirSync(distRoot, {withFileTypes: true})) { - if (entry.isDirectory()) skillNames.add(entry.name) - } - } - - const promptIds: string[] = [] - - for (const skillName of [...skillNames].sort()) { - promptIds.push(`skill:${skillName}`) - - const sourceDir = path.join(sourceRoot, skillName) - const distDir = path.join(distRoot, skillName) - const childNames = new Set() - - for (const match of listFiles(sourceDir, [`**/*${SOURCE_PROMPT_EXTENSION}`, `**/*${MDX_EXTENSION}`])) { - const stripped = stripPromptExtension(match) - if (stripped === SKILL_ENTRY_FILE_NAME) continue - childNames.add(stripped) - } - - for (const match of listFiles(distDir, [`**/*${MDX_EXTENSION}`])) { - const stripped = stripPromptExtension(match) - if (stripped === SKILL_ENTRY_FILE_NAME) continue - childNames.add(stripped) - } - - for (const childName of [...childNames].sort()) promptIds.push(`skill-child-doc:${skillName}/${childName}`) - } - - return promptIds -} - -function collectProjectPromptIds(env: ResolvedPromptEnvironment): string[] { - const promptIds: string[] = [] - - for (const series of resolveAindexProjectSeriesConfigs(env.options)) { - const sourceRoot = path.join(env.aindexDir, series.src) - const distRoot = path.join(env.aindexDir, series.dist) - const relativeDirs = new Set() - - for (const match of listFiles(sourceRoot, [`**/${PROJECT_MEMORY_FILE_NAME}${SOURCE_PROMPT_EXTENSION}`, `**/${PROJECT_MEMORY_FILE_NAME}${MDX_EXTENSION}`])) { - const directory = normalizeSlashPath(path.posix.dirname(normalizeSlashPath(match))) - if (directory !== '.') relativeDirs.add(directory) - } - - for (const match of listFiles(distRoot, [`**/${PROJECT_MEMORY_FILE_NAME}${MDX_EXTENSION}`])) { - const directory = normalizeSlashPath(path.posix.dirname(normalizeSlashPath(match))) - if (directory !== '.') relativeDirs.add(directory) - } - - for (const relativeDir of [...relativeDirs].sort()) { - const [projectName, ...rest] = relativeDir.split('/') - const childPath = rest.join('/') - if (projectName == null || projectName.length === 0) continue - - promptIds.push(childPath.length === 0 - ? `project-memory:${series.name}/${projectName}` - : `project-child-memory:${series.name}/${projectName}/${childPath}`) - } - } - - return promptIds -} - -function collectDiscoveredPromptIds(env: ResolvedPromptEnvironment): string[] { - const promptIds = new Set() - const globalDefinition = buildGlobalMemoryDefinition(env) - const workspaceDefinition = buildWorkspaceMemoryDefinition(env) - - if ( - fs.existsSync(globalDefinition.paths.zh) - || fs.existsSync(globalDefinition.paths.en) - || fs.existsSync(globalDefinition.paths.dist) - ) { - promptIds.add(globalDefinition.promptId) - } - - if ( - fs.existsSync(workspaceDefinition.paths.zh) - || fs.existsSync(workspaceDefinition.paths.en) - || fs.existsSync(workspaceDefinition.paths.dist) - ) { - promptIds.add(workspaceDefinition.promptId) - } - - for (const promptId of collectProjectPromptIds(env)) promptIds.add(promptId) - for (const promptId of collectSkillPromptIds(env)) promptIds.add(promptId) - for (const promptId of collectFlatPromptIds(env, 'command')) promptIds.add(promptId) - for (const promptId of collectFlatPromptIds(env, 'subagent')) promptIds.add(promptId) - for (const promptId of collectFlatPromptIds(env, 'rule')) promptIds.add(promptId) - - return [...promptIds].sort() -} - -function parseFrontMatter(content: string): YAMLFrontMatter | undefined { - try { - return parseMarkdown(content).yamlFrontMatter - } - catch { - return void 0 - } -} - -function readArtifact( - filePath: string, - includeContent: boolean, - legacySource: boolean = false -): PromptArtifactRecord | undefined { - if (!(fs.existsSync(filePath) && fs.statSync(filePath).isFile())) return void 0 - - const stat = fs.statSync(filePath) - const rawContent = includeContent ? fs.readFileSync(filePath, 'utf8') : void 0 - - const artifact: PromptArtifactRecord = { - path: filePath, - exists: true, - mtime: stat.mtime.toISOString(), - mtimeMs: stat.mtimeMs, - size: stat.size, - ...legacySource ? {legacySource: true} : {}, - ...rawContent != null ? {content: rawContent} : {} - } - - const frontMatter = rawContent != null ? parseFrontMatter(rawContent) : void 0 - if (frontMatter != null) Object.assign(artifact, {frontMatter}) - - return artifact -} - -function resolveArtifactStatus( - zhArtifact: PromptArtifactRecord | undefined, - targetArtifact: PromptArtifactRecord | undefined -): PromptArtifactState { - if (targetArtifact == null) return 'missing' - if (zhArtifact != null && targetArtifact.mtimeMs < zhArtifact.mtimeMs) return 'stale' - return 'ready' -} - -function hydratePrompt( - definition: PromptDefinition, - includeContent: boolean -): PromptDetails | null { - const hasCanonicalZh = fs.existsSync(definition.paths.zh) - const {legacyZhPath} = definition - const hasLegacyZh = !hasCanonicalZh - && legacyZhPath != null - && fs.existsSync(legacyZhPath) - const zhArtifactPath = hasCanonicalZh - ? definition.paths.zh - : hasLegacyZh - ? legacyZhPath - : void 0 - const zhArtifact = zhArtifactPath != null - ? readArtifact(zhArtifactPath, includeContent, hasLegacyZh) - : void 0 - const enArtifact = hasCanonicalZh || legacyZhPath !== definition.paths.en - ? readArtifact(definition.paths.en, includeContent) - : void 0 - const distArtifact = readArtifact(definition.paths.dist, includeContent) - - if (zhArtifact == null && enArtifact == null && distArtifact == null) return null - - const updatedAt = [zhArtifact, enArtifact, distArtifact] - .filter((artifact): artifact is PromptArtifactRecord => artifact != null) - .sort((a, b) => b.mtimeMs - a.mtimeMs)[0] - ?.mtime - - const prompt: PromptDetails = { - promptId: definition.promptId, - kind: definition.kind, - logicalName: definition.logicalName, - paths: definition.paths, - exists: { - zh: zhArtifact != null, - en: enArtifact != null, - dist: distArtifact != null - }, - enStatus: resolveArtifactStatus(zhArtifact, enArtifact), - distStatus: resolveArtifactStatus(zhArtifact, distArtifact), - ...updatedAt != null ? {updatedAt} : {}, - ...zhArtifact?.legacySource === true ? {legacyZhSource: true} : {}, - src: { - ...zhArtifact != null ? {zh: zhArtifact} : {}, - ...enArtifact != null ? {en: enArtifact} : {} - } - } - - if (distArtifact != null) Object.assign(prompt, {dist: distArtifact}) - - const frontMatter = zhArtifact?.frontMatter ?? enArtifact?.frontMatter ?? distArtifact?.frontMatter - if (frontMatter != null) Object.assign(prompt, {frontMatter}) - - return prompt -} - -function matchesFilter( - value: T, - allowed: readonly T[] | undefined -): boolean { - if (allowed == null || allowed.length === 0) return true - return allowed.includes(value) -} - -function matchesQuery(item: PromptCatalogItem, query: string | undefined): boolean { - if (query == null || query.trim().length === 0) return true - const normalizedQuery = query.trim().toLowerCase() - return item.promptId.toLowerCase().includes(normalizedQuery) - || item.logicalName.toLowerCase().includes(normalizedQuery) -} - -function toCatalogItem(prompt: PromptDetails): PromptCatalogItem { - return { - promptId: prompt.promptId, - kind: prompt.kind, - logicalName: prompt.logicalName, - paths: prompt.paths, - exists: prompt.exists, - enStatus: prompt.enStatus, - distStatus: prompt.distStatus, - ...prompt.updatedAt != null ? {updatedAt: prompt.updatedAt} : {}, - ...prompt.legacyZhSource === true ? {legacyZhSource: true} : {} - } -} - -function isProjectMemoryDefinition(definition: PromptDefinition): boolean { - return LEGACY_PROJECT_MEMORY_KINDS.has(definition.kind) -} - -function writeTextFile(filePath: string, content: string): void { - fs.mkdirSync(path.dirname(filePath), {recursive: true}) - fs.writeFileSync(filePath, content, 'utf8') -} - -function prepareProjectMemoryForEnglishWrite(definition: PromptDefinition): void { - if (!isProjectMemoryDefinition(definition)) return - if (fs.existsSync(definition.paths.zh)) return - if (definition.legacyZhPath == null || !fs.existsSync(definition.legacyZhPath)) return - - const legacyContent = fs.readFileSync(definition.legacyZhPath, 'utf8') - writeTextFile(definition.paths.zh, legacyContent) -} - -function migrateLegacyProjectMemorySourceOnZhWrite(definition: PromptDefinition): void { - if (!isProjectMemoryDefinition(definition)) return - if (definition.legacyZhPath == null || definition.legacyZhPath === definition.paths.zh) return - if (!fs.existsSync(definition.legacyZhPath)) return - - fs.rmSync(definition.legacyZhPath, {force: true}) -} - -export async function listPrompts( - options: ListPromptsOptions = {} -): Promise { - const env = resolvePromptEnvironment(options) - const items = collectDiscoveredPromptIds(env) - .map(promptId => hydratePrompt(buildPromptDefinitionFromId(promptId, env), false)) - .filter((item): item is PromptDetails => item != null) - .map(toCatalogItem) - .filter(item => matchesFilter(item.kind, options.kinds)) - .filter(item => matchesFilter(item.enStatus, options.enStatus)) - .filter(item => matchesFilter(item.distStatus, options.distStatus)) - .filter(item => matchesQuery(item, options.query)) - - return items.sort((a, b) => a.promptId.localeCompare(b.promptId)) -} - -export async function getPrompt( - promptId: string, - options: PromptServiceOptions = {} -): Promise { - const env = resolvePromptEnvironment(options) - return hydratePrompt(buildPromptDefinitionFromId(promptId, env), true) -} - -export async function upsertPromptSource( - input: UpsertPromptSourceInput -): Promise { - const env = resolvePromptEnvironment(input) - const locale = input.locale ?? 'zh' - const definition = buildPromptDefinitionFromId(input.promptId, env) - - if (locale === 'zh') { - writeTextFile(definition.paths.zh, input.content) - migrateLegacyProjectMemorySourceOnZhWrite(definition) - } else { - prepareProjectMemoryForEnglishWrite(definition) - writeTextFile(definition.paths.en, input.content) - } - - const prompt = hydratePrompt(definition, true) - if (prompt == null) throw new Error(`Failed to load prompt after write: ${input.promptId}`) - return prompt -} - -export async function writePromptArtifacts( - input: WritePromptArtifactsInput -): Promise { - if (input.enContent == null && input.distContent == null) throw new Error('writePromptArtifacts requires enContent or distContent') - - const env = resolvePromptEnvironment(input) - const definition = buildPromptDefinitionFromId(input.promptId, env) - - if (input.enContent != null) { - prepareProjectMemoryForEnglishWrite(definition) - writeTextFile(definition.paths.en, input.enContent) - } - - if (input.distContent != null) writeTextFile(definition.paths.dist, input.distContent) - - const prompt = hydratePrompt(definition, true) - if (prompt == null) throw new Error(`Failed to load prompt after write: ${input.promptId}`) - return prompt -} - -export async function resolvePromptDefinition( - promptId: string, - options: PromptServiceOptions = {} -): Promise { - const env = resolvePromptEnvironment(options) - return buildPromptDefinitionFromId(promptId, env).paths -} diff --git a/cli/src/public-config-paths.ts b/cli/src/public-config-paths.ts deleted file mode 100644 index 475c3526..00000000 --- a/cli/src/public-config-paths.ts +++ /dev/null @@ -1,208 +0,0 @@ -import type {IDEKind} from './plugins/plugin-core/enums' -import type {ProjectIDEConfigFile} from './plugins/plugin-core/InputTypes' -import * as fs from 'node:fs' -import * as path from 'node:path' -import process from 'node:process' -import {resolvePublicPath} from '@truenine/script-runtime' -import {AINDEX_FILE_NAMES} from './plugins/plugin-core/AindexTypes' -import {FilePathKind} from './plugins/plugin-core/enums' - -export const PUBLIC_CONFIG_DEFINITION_DIR = 'public' -export const PUBLIC_PROXY_FILE_NAME = 'proxy.ts' - -export const PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH = '.gitignore' -export const PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH = '.git/info/exclude' - -export const AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS = [ - AINDEX_FILE_NAMES.QODER_IGNORE, - AINDEX_FILE_NAMES.CURSOR_IGNORE, - AINDEX_FILE_NAMES.WARP_INDEX_IGNORE, - AINDEX_FILE_NAMES.AI_IGNORE, - AINDEX_FILE_NAMES.CODEIUM_IGNORE, - '.kiroignore', - '.traeignore' -] as const - -export const KNOWN_PUBLIC_CONFIG_TARGET_RELATIVE_PATHS = [ - PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, - PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, - AINDEX_FILE_NAMES.EDITOR_CONFIG, - AINDEX_FILE_NAMES.VSCODE_SETTINGS, - AINDEX_FILE_NAMES.VSCODE_EXTENSIONS, - AINDEX_FILE_NAMES.ZED_SETTINGS, - AINDEX_FILE_NAMES.IDEA_PROJECT_XML, - AINDEX_FILE_NAMES.IDEA_CODE_STYLE_CONFIG_XML, - AINDEX_FILE_NAMES.IDEA_GITIGNORE, - ...AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS -] as const - -export interface PublicDefinitionResolveOptions { - readonly command?: ProxyCommand | undefined - readonly workspaceDir?: string | undefined -} - -type ProxyCommand = 'execute' | 'dry-run' | 'clean' | 'plugins' - -interface ProxyContext { - readonly cwd: string - readonly workspaceDir: string - readonly aindexDir: string - readonly command: ProxyCommand - readonly platform: NodeJS.Platform -} - -const publicDefinitionPathCache = new Map() - -function normalizeTargetRelativePath(targetRelativePath: string): string { - const normalizedPath = targetRelativePath - .split(/[\\/]+/) - .filter(segment => segment.length > 0) - .join('/') - - if (normalizedPath.length === 0) - { throw new Error('public target relative path cannot be empty') } - return normalizedPath -} - -function getPublicRootDir(aindexDir: string): string { - return path.join(aindexDir, PUBLIC_CONFIG_DEFINITION_DIR) -} - -function getPublicProxyPath(aindexDir: string): string { - return path.join(getPublicRootDir(aindexDir), PUBLIC_PROXY_FILE_NAME) -} - -function getResolveCommand( - options?: PublicDefinitionResolveOptions -): ProxyCommand { - return options?.command ?? 'execute' -} - -function getResolveWorkspaceDir( - aindexDir: string, - options?: PublicDefinitionResolveOptions -): string { - return path.resolve(options?.workspaceDir ?? path.dirname(aindexDir)) -} - -function buildProxyContext( - aindexDir: string, - workspaceDir: string, - command: ProxyCommand -): ProxyContext { - const resolvedAindexDir = path.resolve(aindexDir) - - return { - cwd: workspaceDir, - workspaceDir, - aindexDir: resolvedAindexDir, - command, - platform: process.platform - } -} - -function resolvePublicPathForDefinition( - filePath: string, - ctx: ProxyContext, - logicalPath: string -): string { - // `tsc` resolves this workspace package correctly, but ESLint's type-aware rules - // sometimes treat it as an error-typed export during monorepo lint execution. - return resolvePublicPath(filePath, ctx, logicalPath) -} - -function resolvePublicDefinitionRelativePath( - aindexDir: string, - targetRelativePath: string, - options?: PublicDefinitionResolveOptions -): string { - const normalizedTargetPath = normalizeTargetRelativePath(targetRelativePath) - if (normalizedTargetPath === PUBLIC_PROXY_FILE_NAME) - { return PUBLIC_PROXY_FILE_NAME } - - const proxyFilePath = getPublicProxyPath(aindexDir) - if (!(fs.existsSync(proxyFilePath) && fs.statSync(proxyFilePath).isFile())) - { return normalizedTargetPath } - - const command = getResolveCommand(options) - const workspaceDir = getResolveWorkspaceDir(aindexDir, options) - const cacheKey = [ - proxyFilePath, - workspaceDir, - command, - normalizedTargetPath - ].join('::') - const cachedPath = publicDefinitionPathCache.get(cacheKey) - if (cachedPath != null) return cachedPath - - const resolvedRelativePath = resolvePublicPathForDefinition( - proxyFilePath, - buildProxyContext(aindexDir, workspaceDir, command), - normalizedTargetPath - ) - - publicDefinitionPathCache.set(cacheKey, resolvedRelativePath) - return resolvedRelativePath -} - -export function resolvePublicDefinitionPath( - aindexDir: string, - targetRelativePath: string, - options?: PublicDefinitionResolveOptions -): string { - const resolvedRelativePath = resolvePublicDefinitionRelativePath( - aindexDir, - targetRelativePath, - options - ) - return path.join( - getPublicRootDir(aindexDir), - ...resolvedRelativePath.split(/[\\/]+/) - ) -} - -export function collectKnownPublicConfigDefinitionPaths( - aindexDir: string, - options?: PublicDefinitionResolveOptions -): string[] { - const resolvedPaths = new Set([ - resolvePublicDefinitionPath(aindexDir, PUBLIC_PROXY_FILE_NAME) - ]) - - for (const targetRelativePath of KNOWN_PUBLIC_CONFIG_TARGET_RELATIVE_PATHS) { - resolvedPaths.add( - resolvePublicDefinitionPath(aindexDir, targetRelativePath, options) - ) - } - - return [...resolvedPaths] -} - -export function readPublicIdeConfigDefinitionFile( - type: T, - targetRelativePath: string, - aindexDir: string, - fs: typeof import('node:fs'), - options?: PublicDefinitionResolveOptions -): ProjectIDEConfigFile | undefined { - const absolutePath = resolvePublicDefinitionPath( - aindexDir, - targetRelativePath, - options - ) - if (!(fs.existsSync(absolutePath) && fs.statSync(absolutePath).isFile())) - { return void 0 } - - const content = fs.readFileSync(absolutePath, 'utf8') - return { - type, - content, - length: content.length, - filePathKind: FilePathKind.Absolute, - dir: { - pathKind: FilePathKind.Absolute, - path: absolutePath, - getDirectoryName: () => path.basename(absolutePath) - } - } -} diff --git a/cli/src/runtime-environment.test.ts b/cli/src/runtime-environment.test.ts deleted file mode 100644 index 0bdeb63f..00000000 --- a/cli/src/runtime-environment.test.ts +++ /dev/null @@ -1,149 +0,0 @@ -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import {afterEach, describe, expect, it} from 'vitest' -import { - getRequiredGlobalConfigPath, - resolveRuntimeEnvironment, - resolveUserPath -} from './runtime-environment' - -describe('runtime environment', () => { - let tempDir: string | undefined - - afterEach(() => { - if (tempDir != null) fs.rmSync(tempDir, {recursive: true, force: true}) - tempDir = void 0 - }) - - it('uses the native Windows home config path when running on Windows', () => { - tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-win-runtime-')) - const windowsHomeDir = path.join(tempDir, 'WindowsHome') - const configPath = path.join(windowsHomeDir, '.aindex', '.tnmsc.json') - - fs.mkdirSync(path.dirname(configPath), {recursive: true}) - fs.writeFileSync(configPath, '{}\n', 'utf8') - - const runtimeEnvironment = resolveRuntimeEnvironment({ - fs, - platform: 'win32', - env: { - USERPROFILE: windowsHomeDir - }, - homedir: windowsHomeDir - }) - - expect(runtimeEnvironment.isWsl).toBe(false) - expect(runtimeEnvironment.selectedGlobalConfigPath).toBeUndefined() - expect(runtimeEnvironment.effectiveHomeDir).toBe(windowsHomeDir) - expect(getRequiredGlobalConfigPath({ - fs, - platform: 'win32', - env: { - USERPROFILE: windowsHomeDir - }, - homedir: windowsHomeDir - })).toBe(configPath) - expect(resolveUserPath('~/.codex/config.toml', { - fs, - platform: 'win32', - env: { - USERPROFILE: windowsHomeDir - }, - homedir: windowsHomeDir - })).toBe(path.win32.join(windowsHomeDir, '.codex', 'config.toml')) - }) - - it('selects the host config path that matches the current Windows profile in WSL', () => { - tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-wsl-runtime-')) - const usersRoot = path.join(tempDir, 'Users') - const alphaConfigPath = path.join(usersRoot, 'alpha', '.aindex', '.tnmsc.json') - const bravoConfigPath = path.join(usersRoot, 'bravo', '.aindex', '.tnmsc.json') - - fs.mkdirSync(path.dirname(alphaConfigPath), {recursive: true}) - fs.mkdirSync(path.dirname(bravoConfigPath), {recursive: true}) - fs.writeFileSync(alphaConfigPath, '{}\n', 'utf8') - fs.writeFileSync(bravoConfigPath, '{}\n', 'utf8') - - const runtimeEnvironment = resolveRuntimeEnvironment({ - fs, - platform: 'linux', - env: { - WSL_DISTRO_NAME: 'Ubuntu', - USERPROFILE: path.join(usersRoot, 'bravo') - }, - homedir: '/home/linux-user', - windowsUsersRoot: usersRoot - }) - - expect(runtimeEnvironment.isWsl).toBe(true) - expect(runtimeEnvironment.selectedGlobalConfigPath).toBe(bravoConfigPath) - expect(runtimeEnvironment.effectiveHomeDir).toBe(path.join(usersRoot, 'bravo').replaceAll('\\', '/')) - expect(getRequiredGlobalConfigPath({ - fs, - platform: 'linux', - env: { - WSL_DISTRO_NAME: 'Ubuntu', - USERPROFILE: path.join(usersRoot, 'bravo') - }, - homedir: '/home/linux-user', - windowsUsersRoot: usersRoot - })).toBe(bravoConfigPath) - }) - - it('fails when the discovered config belongs to another Windows profile', () => { - tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-wsl-runtime-mismatch-')) - const usersRoot = path.join(tempDir, 'Users') - const alphaConfigPath = path.join(usersRoot, 'alpha', '.aindex', '.tnmsc.json') - - fs.mkdirSync(path.dirname(alphaConfigPath), {recursive: true}) - fs.writeFileSync(alphaConfigPath, '{}\n', 'utf8') - - expect(() => getRequiredGlobalConfigPath({ - fs, - platform: 'linux', - env: { - WSL_DISTRO_NAME: 'Ubuntu', - USERPROFILE: path.join(usersRoot, 'bravo') - }, - homedir: '/home/linux-user', - windowsUsersRoot: usersRoot - })).toThrow('current Windows user') - }) - - it('fails when WSL is active but no host config exists', () => { - tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-wsl-runtime-missing-')) - - expect(() => getRequiredGlobalConfigPath({ - fs, - platform: 'linux', - env: {WSL_DISTRO_NAME: 'Ubuntu'}, - homedir: '/home/linux-user', - windowsUsersRoot: path.join(tempDir, 'Users') - })).toThrow('WSL host config file not found') - }) - - it('maps host-home, windows drive, and environment-variable paths for WSL workloads', () => { - const runtimeEnvironment = { - platform: 'linux', - isWsl: true, - nativeHomeDir: '/home/linux-user', - effectiveHomeDir: '/mnt/c/Users/alpha', - globalConfigCandidates: ['/mnt/c/Users/alpha/.aindex/.tnmsc.json'], - selectedGlobalConfigPath: '/mnt/c/Users/alpha/.aindex/.tnmsc.json', - wslHostHomeDir: '/mnt/c/Users/alpha', - windowsUsersRoot: '/mnt/c/Users', - expandedEnv: { - HOME: '/mnt/c/Users/alpha', - USERPROFILE: '/mnt/c/Users/alpha', - HOMEDRIVE: 'C:', - HOMEPATH: '\\Users\\alpha' - } - } as const - - expect(resolveUserPath('~/workspace\\foo', runtimeEnvironment)).toBe('/mnt/c/Users/alpha/workspace/foo') - expect(resolveUserPath('C:\\Work\\Repo', runtimeEnvironment)).toBe('/mnt/c/Work/Repo') - expect(resolveUserPath('%USERPROFILE%\\workspace\\bar', runtimeEnvironment)).toBe('/mnt/c/Users/alpha/workspace/bar') - expect(resolveUserPath('$HOME/workspace/baz', runtimeEnvironment)).toBe('/mnt/c/Users/alpha/workspace/baz') - }) -}) diff --git a/cli/src/runtime-environment.ts b/cli/src/runtime-environment.ts deleted file mode 100644 index 7c2db229..00000000 --- a/cli/src/runtime-environment.ts +++ /dev/null @@ -1,361 +0,0 @@ -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import process from 'node:process' - -export const DEFAULT_WSL_WINDOWS_USERS_ROOT = '/mnt/c/Users' -export const DEFAULT_GLOBAL_CONFIG_DIR = '.aindex' -export const DEFAULT_GLOBAL_CONFIG_FILE_NAME = '.tnmsc.json' - -const WINDOWS_DRIVE_PATH_PATTERN = /^[A-Za-z]:[\\/]/u -const PERCENT_ENV_PATTERN = /%([^%]+)%/gu -const BRACED_ENV_PATTERN = /\$\{([A-Za-z_]\w*)\}/gu -const SHELL_ENV_PATTERN = /\$([A-Za-z_]\w*)/gu - -type RuntimeFs = Pick - -export interface RuntimeEnvironmentDependencies { - readonly fs?: RuntimeFs - readonly env?: NodeJS.ProcessEnv - readonly platform?: NodeJS.Platform - readonly homedir?: string - readonly release?: string - readonly windowsUsersRoot?: string -} - -export interface RuntimeEnvironmentContext { - readonly platform: NodeJS.Platform - readonly isWsl: boolean - readonly nativeHomeDir: string - readonly effectiveHomeDir: string - readonly globalConfigCandidates: readonly string[] - readonly selectedGlobalConfigPath?: string - readonly wslHostHomeDir?: string - readonly windowsUsersRoot: string - readonly expandedEnv: Readonly> -} - -function isRuntimeEnvironmentContext( - value: RuntimeEnvironmentDependencies | RuntimeEnvironmentContext | undefined -): value is RuntimeEnvironmentContext { - return value != null - && 'effectiveHomeDir' in value - && 'expandedEnv' in value -} - -function getFs(dependencies?: RuntimeEnvironmentDependencies): RuntimeFs { - return dependencies?.fs ?? fs -} - -function getPlatform(dependencies?: RuntimeEnvironmentDependencies): NodeJS.Platform { - return dependencies?.platform ?? process.platform -} - -function getRelease(dependencies?: RuntimeEnvironmentDependencies): string { - return dependencies?.release ?? os.release() -} - -function getNativeHomeDir(dependencies?: RuntimeEnvironmentDependencies): string { - return dependencies?.homedir ?? os.homedir() -} - -function getEnv(dependencies?: RuntimeEnvironmentDependencies): NodeJS.ProcessEnv { - return dependencies?.env ?? process.env -} - -function getWindowsUsersRoot(dependencies?: RuntimeEnvironmentDependencies): string { - return dependencies?.windowsUsersRoot ?? DEFAULT_WSL_WINDOWS_USERS_ROOT -} - -function normalizePosixLikePath(rawPath: string): string { - return path.posix.normalize(rawPath.replaceAll('\\', '/')) -} - -function isSameOrChildPath(candidatePath: string, parentPath: string): boolean { - const normalizedCandidate = normalizePosixLikePath(candidatePath) - const normalizedParent = normalizePosixLikePath(parentPath) - - if (normalizedCandidate === normalizedParent) return true - return normalizedCandidate.startsWith(`${normalizedParent}/`) -} - -function resolveWslHostHomeCandidate( - rawPath: string | undefined, - usersRoot: string -): string | undefined { - if (typeof rawPath !== 'string') return void 0 - - const trimmedPath = rawPath.trim() - if (trimmedPath.length === 0) return void 0 - - const candidatePaths = [ - convertWindowsPathToWsl(trimmedPath), - normalizePosixLikePath(trimmedPath) - ] - - for (const candidatePath of candidatePaths) { - if (candidatePath == null) continue - if (isSameOrChildPath(candidatePath, usersRoot)) return normalizePosixLikePath(candidatePath) - } - - return void 0 -} - -function getPreferredWslHostHomeDirs( - dependencies?: RuntimeEnvironmentDependencies -): string[] { - const env = getEnv(dependencies) - const usersRoot = normalizePosixLikePath(getWindowsUsersRoot(dependencies)) - const homeDrive = env['HOMEDRIVE'] - const homePath = env['HOMEPATH'] - const preferredHomeDirs = [ - resolveWslHostHomeCandidate(env['USERPROFILE'], usersRoot), - typeof homeDrive === 'string' && homeDrive.length > 0 && typeof homePath === 'string' && homePath.length > 0 - ? resolveWslHostHomeCandidate(`${homeDrive}${homePath}`, usersRoot) - : void 0, - resolveWslHostHomeCandidate(env['HOME'], usersRoot) - ] - - return [...new Set(preferredHomeDirs.filter((candidate): candidate is string => candidate != null))] -} - -function getWslHostHomeDirForConfigPath(configPath: string): string { - const normalizedConfigPath = normalizePosixLikePath(configPath) - return path.posix.dirname(path.posix.dirname(normalizedConfigPath)) -} - -function selectWslHostGlobalConfigPath( - globalConfigCandidates: readonly string[], - dependencies?: RuntimeEnvironmentDependencies -): string | undefined { - const preferredHomeDirs = getPreferredWslHostHomeDirs(dependencies) - - if (preferredHomeDirs.length <= 0) return globalConfigCandidates.length === 1 ? globalConfigCandidates[0] : void 0 - - for (const preferredHomeDir of preferredHomeDirs) { - const matchedCandidate = globalConfigCandidates.find(candidatePath => - getWslHostHomeDirForConfigPath(candidatePath) === preferredHomeDir) - if (matchedCandidate != null) return matchedCandidate - } - return void 0 -} - -function isDirectory(fsImpl: RuntimeFs, targetPath: string): boolean { - try { - return fsImpl.statSync(targetPath).isDirectory() - } - catch { - return false - } -} - -function isFile(fsImpl: RuntimeFs, targetPath: string): boolean { - try { - return fsImpl.statSync(targetPath).isFile() - } - catch { - return false - } -} - -function getPathModule(platform: NodeJS.Platform): typeof path.posix | typeof path.win32 { - return platform === 'win32' ? path.win32 : path.posix -} - -function buildExpandedEnv( - rawEnv: NodeJS.ProcessEnv, - nativeHomeDir: string, - effectiveHomeDir: string -): Readonly> { - const expandedEnv: Record = {} - - for (const [key, value] of Object.entries(rawEnv)) { - if (typeof value === 'string') expandedEnv[key] = value - } - - if (effectiveHomeDir === nativeHomeDir) return expandedEnv - - expandedEnv['HOME'] = effectiveHomeDir - expandedEnv['USERPROFILE'] = effectiveHomeDir - const hostHomeMatch = /^\/mnt\/([a-zA-Z])\/(.+)$/u.exec(effectiveHomeDir) - if (hostHomeMatch == null) return expandedEnv - - const driveLetter = hostHomeMatch[1] - const relativePath = hostHomeMatch[2] - if (driveLetter == null || relativePath == null) return expandedEnv - expandedEnv['HOMEDRIVE'] = `${driveLetter.toUpperCase()}:` - expandedEnv['HOMEPATH'] = `\\${relativePath.replaceAll('/', '\\')}` - return expandedEnv -} - -function expandEnvironmentVariables( - rawPath: string, - environment: Readonly> -): string { - const replaceValue = (match: string, key: string): string => environment[key] ?? match - - return rawPath - .replaceAll(PERCENT_ENV_PATTERN, replaceValue) - .replaceAll(BRACED_ENV_PATTERN, replaceValue) - .replaceAll(SHELL_ENV_PATTERN, replaceValue) -} - -function expandHomeDirectory( - rawPath: string, - homeDir: string, - platform: NodeJS.Platform -): string { - if (rawPath === '~') return homeDir - if (!(rawPath.startsWith('~/') || rawPath.startsWith('~\\'))) return rawPath - - const pathModule = getPathModule(platform) - const normalizedSuffix = platform === 'win32' - ? rawPath.slice(2).replaceAll('/', '\\') - : rawPath.slice(2).replaceAll('\\', '/') - - return pathModule.resolve(homeDir, normalizedSuffix) -} - -function convertWindowsPathToWsl(rawPath: string): string | undefined { - if (!WINDOWS_DRIVE_PATH_PATTERN.test(rawPath)) return void 0 - - const driveLetter = rawPath.slice(0, 1).toLowerCase() - const relativePath = rawPath - .slice(2) - .replaceAll('\\', '/') - .replace(/^\/+/u, '') - - const basePath = `/mnt/${driveLetter}` - if (relativePath.length === 0) return basePath - return path.posix.join(basePath, relativePath) -} - -function normalizeResolvedPath(rawPath: string, platform: NodeJS.Platform): string { - if (platform === 'win32') return path.win32.normalize(rawPath.replaceAll('/', '\\')) - return path.posix.normalize(rawPath) -} - -export function isWslRuntime( - dependencies?: RuntimeEnvironmentDependencies -): boolean { - if (getPlatform(dependencies) !== 'linux') return false - - const env = getEnv(dependencies) - if (typeof env['WSL_DISTRO_NAME'] === 'string' && env['WSL_DISTRO_NAME'].length > 0) return true - if (typeof env['WSL_INTEROP'] === 'string' && env['WSL_INTEROP'].length > 0) return true - - return getRelease(dependencies).toLowerCase().includes('microsoft') -} - -export function findWslHostGlobalConfigPaths( - dependencies?: RuntimeEnvironmentDependencies -): string[] { - const fsImpl = getFs(dependencies) - const usersRoot = getWindowsUsersRoot(dependencies) - - if (!isDirectory(fsImpl, usersRoot)) return [] - - try { - const dirEntries = fsImpl.readdirSync(usersRoot, {withFileTypes: true}) - const candidates = dirEntries - .filter(dirEntry => dirEntry.isDirectory()) - .map(dirEntry => path.join(usersRoot, dirEntry.name, DEFAULT_GLOBAL_CONFIG_DIR, DEFAULT_GLOBAL_CONFIG_FILE_NAME)) - .filter(candidatePath => fsImpl.existsSync(candidatePath) && isFile(fsImpl, candidatePath)) - - candidates.sort((a, b) => a.localeCompare(b)) - return candidates - } - catch { - return [] - } -} - -export function resolveRuntimeEnvironment( - dependencies?: RuntimeEnvironmentDependencies -): RuntimeEnvironmentContext { - const platform = getPlatform(dependencies) - const nativeHomeDir = getNativeHomeDir(dependencies) - const wslRuntime = isWslRuntime(dependencies) - const globalConfigCandidates = wslRuntime ? findWslHostGlobalConfigPaths(dependencies) : [] - const selectedGlobalConfigPath = wslRuntime - ? selectWslHostGlobalConfigPath(globalConfigCandidates, dependencies) - : void 0 - const effectiveHomeDir = selectedGlobalConfigPath != null - ? getWslHostHomeDirForConfigPath(selectedGlobalConfigPath) - : nativeHomeDir - - return { - platform, - isWsl: wslRuntime, - nativeHomeDir, - effectiveHomeDir, - globalConfigCandidates, - ...selectedGlobalConfigPath != null && {selectedGlobalConfigPath}, - ...selectedGlobalConfigPath != null && {wslHostHomeDir: effectiveHomeDir}, - windowsUsersRoot: getWindowsUsersRoot(dependencies), - expandedEnv: buildExpandedEnv(getEnv(dependencies), nativeHomeDir, effectiveHomeDir) - } -} - -export function getEffectiveHomeDir( - dependencies?: RuntimeEnvironmentDependencies -): string { - return resolveRuntimeEnvironment(dependencies).effectiveHomeDir -} - -export function getGlobalConfigPath( - dependencies?: RuntimeEnvironmentDependencies -): string { - const runtimeEnvironment = resolveRuntimeEnvironment(dependencies) - if (runtimeEnvironment.selectedGlobalConfigPath != null) return runtimeEnvironment.selectedGlobalConfigPath - - return path.join( - runtimeEnvironment.effectiveHomeDir, - DEFAULT_GLOBAL_CONFIG_DIR, - DEFAULT_GLOBAL_CONFIG_FILE_NAME - ) -} - -export function getRequiredGlobalConfigPath( - dependencies?: RuntimeEnvironmentDependencies -): string { - const runtimeEnvironment = resolveRuntimeEnvironment(dependencies) - - if (!runtimeEnvironment.isWsl || runtimeEnvironment.selectedGlobalConfigPath != null) { - return getGlobalConfigPath(dependencies) - } - - const configLookupPattern = `"${runtimeEnvironment.windowsUsersRoot}/*/${DEFAULT_GLOBAL_CONFIG_DIR}/${DEFAULT_GLOBAL_CONFIG_FILE_NAME}"` - if (runtimeEnvironment.globalConfigCandidates.length === 0) { - throw new Error(`WSL host config file not found under ${configLookupPattern}.`) - } - if (getPreferredWslHostHomeDirs(dependencies).length > 0) { - throw new Error(`WSL host config file for the current Windows user was not found under ${configLookupPattern}.`) - } - throw new Error(`WSL host config file could not be matched to the current Windows user under ${configLookupPattern}.`) -} - -export function resolveUserPath( - rawPath: string, - dependenciesOrContext?: RuntimeEnvironmentDependencies | RuntimeEnvironmentContext -): string { - const runtimeEnvironment = isRuntimeEnvironmentContext(dependenciesOrContext) - ? dependenciesOrContext - : resolveRuntimeEnvironment(dependenciesOrContext) - - let resolvedPath = expandEnvironmentVariables(rawPath, runtimeEnvironment.expandedEnv) - resolvedPath = expandHomeDirectory(resolvedPath, runtimeEnvironment.effectiveHomeDir, runtimeEnvironment.platform) - - if (!runtimeEnvironment.isWsl) return normalizeResolvedPath(resolvedPath, runtimeEnvironment.platform) - - const convertedWindowsPath = convertWindowsPathToWsl(resolvedPath) - if (convertedWindowsPath != null) resolvedPath = convertedWindowsPath - else if ( - resolvedPath.startsWith(runtimeEnvironment.effectiveHomeDir) - || resolvedPath.startsWith('/mnt/') - || resolvedPath.startsWith('/') - ) { - resolvedPath = resolvedPath.replaceAll('\\', '/') - } - return normalizeResolvedPath(resolvedPath, runtimeEnvironment.platform) -} diff --git a/cli/src/schema.ts b/cli/src/schema.ts deleted file mode 100644 index 1ff13cf7..00000000 --- a/cli/src/schema.ts +++ /dev/null @@ -1,14 +0,0 @@ -import {zodToJsonSchema} from 'zod-to-json-schema' -import {ZUserConfigFile} from './plugins/plugin-core' - -/** - * JSON Schema for .tnmsc.json — auto-generated from ZUserConfigFile via zod-to-json-schema. - * Do not edit manually; update ZUserConfigFile in types/ConfigTypes.schema.ts instead. - */ -export const TNMSC_JSON_SCHEMA = zodToJsonSchema(ZUserConfigFile, { - name: 'UserConfigFile', - nameStrategy: 'title', - $refStrategy: 'none', - target: 'jsonSchema7', - definitionPath: '$defs' -}) diff --git a/cli/src/script-runtime-worker.ts b/cli/src/script-runtime-worker.ts deleted file mode 100644 index ae6854a6..00000000 --- a/cli/src/script-runtime-worker.ts +++ /dev/null @@ -1,19 +0,0 @@ -import {readFileSync} from 'node:fs' -import process from 'node:process' -import {resolvePublicPathUnchecked} from '@truenine/script-runtime' - -async function main(): Promise { - const [, , filePath, ctxJsonPath, logicalPath] = process.argv - if (filePath == null || ctxJsonPath == null || logicalPath == null) throw new Error('Usage: script-runtime-worker ') - - const ctxJson = readFileSync(ctxJsonPath, 'utf8') - const ctx = JSON.parse(ctxJson) as Parameters[1] - const result = await resolvePublicPathUnchecked(filePath, ctx, logicalPath) - process.stdout.write(`${result}\n`) -} - -main().catch((error: unknown) => { - const message = error instanceof Error ? error.message : String(error) - process.stderr.write(`${message}\n`) - process.exit(1) -}) diff --git a/cli/src/wsl-mirror-sync.test.ts b/cli/src/wsl-mirror-sync.test.ts deleted file mode 100644 index d4af7962..00000000 --- a/cli/src/wsl-mirror-sync.test.ts +++ /dev/null @@ -1,588 +0,0 @@ -import type {ILogger, OutputFileDeclaration, OutputPlugin, OutputWriteContext} from './plugins/plugin-core' -import {Buffer} from 'node:buffer' -import * as path from 'node:path' -import {describe, expect, it, vi} from 'vitest' -import {PluginKind} from './plugins/plugin-core' -import {syncWindowsConfigIntoWsl} from './wsl-mirror-sync' - -class MemoryMirrorFs { - readonly files = new Map() - - readonly directories = new Set() - - private normalizePath(targetPath: string): string { - if (targetPath.includes('\\') || /^[A-Za-z]:[\\/]/u.test(targetPath)) { - return path.win32.normalize(targetPath) - } - - return path.posix.normalize(targetPath) - } - - private getPathModule(targetPath: string): typeof path.win32 | typeof path.posix { - if (targetPath.includes('\\') || /^[A-Za-z]:[\\/]/u.test(targetPath)) { - return path.win32 - } - - return path.posix - } - - existsSync(targetPath: string): boolean { - const normalizedPath = this.normalizePath(targetPath) - return this.files.has(normalizedPath) || this.directories.has(normalizedPath) - } - - mkdirSync(targetPath: string, options?: {recursive?: boolean}): void { - const pathModule = this.getPathModule(targetPath) - const normalizedPath = pathModule.normalize(targetPath) - - if (options?.recursive === true) { - let currentPath = normalizedPath - while (currentPath.length > 0 && !this.directories.has(currentPath)) { - this.directories.add(currentPath) - const parentPath = pathModule.dirname(currentPath) - if (parentPath === currentPath) break - currentPath = parentPath - } - return - } - - this.directories.add(normalizedPath) - } - - readFileSync(targetPath: string): Buffer { - const normalizedPath = this.normalizePath(targetPath) - const content = this.files.get(normalizedPath) - if (content == null) throw new Error(`ENOENT: ${normalizedPath}`) - return Buffer.from(content) - } - - writeFileSync(targetPath: string, data: string | NodeJS.ArrayBufferView): void { - const pathModule = this.getPathModule(targetPath) - const normalizedPath = pathModule.normalize(targetPath) - this.directories.add(pathModule.dirname(normalizedPath)) - - if (typeof data === 'string') { - this.files.set(normalizedPath, Buffer.from(data, 'utf8')) - return - } - - this.files.set(normalizedPath, Buffer.from(data.buffer, data.byteOffset, data.byteLength)) - } - - seedDirectory(targetPath: string): void { - this.directories.add(this.normalizePath(targetPath)) - } - - seedFile(targetPath: string, content: string): void { - const pathModule = this.getPathModule(targetPath) - const normalizedPath = pathModule.normalize(targetPath) - this.directories.add(pathModule.dirname(normalizedPath)) - this.files.set(normalizedPath, Buffer.from(content, 'utf8')) - } -} - -interface RecordedLogger extends ILogger { - readonly infoMessages: string[] -} - -function createLogger(): RecordedLogger { - const infoMessages: string[] = [] - return { - trace: () => {}, - debug: () => {}, - info: (message: unknown) => { - infoMessages.push(String(message)) - }, - warn: () => {}, - error: () => {}, - fatal: () => {}, - infoMessages - } as RecordedLogger -} - -function createMirrorPlugin(sourcePaths: string | readonly string[] = []): OutputPlugin { - const normalizedPaths = Array.isArray(sourcePaths) ? sourcePaths : [sourcePaths] - - return { - type: PluginKind.Output, - name: 'MirrorPlugin', - log: createLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return [] - }, - async convertContent() { - return '' - }, - async declareWslMirrorFiles() { - return normalizedPaths - .filter(sourcePath => sourcePath.length > 0) - .map(sourcePath => ({sourcePath})) - } - } -} - -function createWriteContext(instances?: string | string[], dryRun: boolean = false): OutputWriteContext { - return { - logger: createLogger(), - dryRun, - runtimeTargets: { - jetbrainsCodexDirs: [] - }, - pluginOptions: { - windows: { - wsl2: { - instances - } - } - }, - collectedOutputContext: { - workspace: { - directory: { - pathKind: 'absolute', - path: 'C:\\workspace', - getDirectoryName: () => 'workspace' - }, - projects: [] - } - } - } as unknown as OutputWriteContext -} - -function createPredeclaredOutputs( - plugin: OutputPlugin, - declarations: readonly OutputFileDeclaration[] -): ReadonlyMap { - return new Map([[plugin, declarations]]) -} - -function createGlobalOutputDeclaration( - targetPath: string -): OutputFileDeclaration { - return { - path: targetPath, - scope: 'global', - source: {kind: 'generated'} - } -} - -function createWslSpawnSyncMock( - homesByInstance: Readonly>, - discoveredInstances: readonly string[] = Object.keys(homesByInstance) -) { - return vi.fn((_command: string, args: readonly string[]) => { - if (args[0] === '--list' && args[1] === '--quiet') { - return { - status: 0, - stdout: Buffer.from(discoveredInstances.join('\r\n'), 'utf16le'), - stderr: Buffer.alloc(0) - } - } - - if (args[0] === '-d') { - const instance = args[1] - const linuxHomeDir = instance == null ? void 0 : homesByInstance[instance] - - if (linuxHomeDir == null) { - return { - status: 1, - stdout: Buffer.alloc(0), - stderr: Buffer.from(`distribution "${instance}" not found`, 'utf8') - } - } - - return { - status: 0, - stdout: Buffer.from(linuxHomeDir, 'utf8'), - stderr: Buffer.alloc(0) - } - } - - throw new Error(`Unexpected spawnSync args: ${JSON.stringify(args)}`) - }) -} - -function wasWslListCalled( - spawnSyncMock: ReturnType -): boolean { - return spawnSyncMock.mock.calls.some(([, args]) => Array.isArray(args) && args[0] === '--list' && args[1] === '--quiet') -} - -describe('wsl mirror sync', () => { - it('copies declared host config files into each resolved WSL home', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = 'C:\\Users\\alpha' - const sourcePath = path.win32.join(hostHomeDir, '.codex', 'config.toml') - const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' - const targetPath = path.win32.join(targetHomeDir, '.codex', 'config.toml') - - memoryFs.seedFile(sourcePath, 'codex = true\n') - memoryFs.seedDirectory(targetHomeDir) - - const spawnSyncMock = createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) - - const result = await syncWindowsConfigIntoWsl( - [createMirrorPlugin('~/.codex/config.toml')], - createWriteContext('Ubuntu'), - { - fs: memoryFs, - spawnSync: spawnSyncMock as never, - platform: 'win32', - effectiveHomeDir: hostHomeDir - } - ) - - expect(result.errors).toEqual([]) - expect(result.warnings).toEqual([]) - expect(result.mirroredFiles).toBe(1) - expect(memoryFs.readFileSync(targetPath).toString('utf8')).toBe('codex = true\n') - expect(wasWslListCalled(spawnSyncMock)).toBe(false) - }) - - it('copies generated global outputs under the host home into each resolved WSL home', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = 'C:\\Users\\alpha' - const sourcePath = path.win32.join(hostHomeDir, '.codex', 'AGENTS.md') - const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' - const targetPath = path.win32.join(targetHomeDir, '.codex', 'AGENTS.md') - const plugin = createMirrorPlugin() - - memoryFs.seedFile(sourcePath, 'global prompt\n') - memoryFs.seedDirectory(targetHomeDir) - - const result = await syncWindowsConfigIntoWsl( - [plugin], - createWriteContext('Ubuntu'), - { - fs: memoryFs, - spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, - platform: 'win32', - effectiveHomeDir: hostHomeDir - }, - createPredeclaredOutputs(plugin, [createGlobalOutputDeclaration(sourcePath)]) - ) - - expect(result.errors).toEqual([]) - expect(result.warnings).toEqual([]) - expect(result.mirroredFiles).toBe(1) - expect(memoryFs.readFileSync(targetPath).toString('utf8')).toBe('global prompt\n') - }) - - it('excludes generated Windows app-data globals from WSL mirroring', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = 'C:\\Users\\alpha' - const sourcePath = path.win32.join(hostHomeDir, 'AppData', 'Local', 'JetBrains', 'IntelliJIdea2026.1', 'aia', 'codex', 'AGENTS.md') - const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' - const plugin = createMirrorPlugin() - - memoryFs.seedFile(sourcePath, 'jetbrains prompt\n') - memoryFs.seedDirectory(targetHomeDir) - - const result = await syncWindowsConfigIntoWsl( - [plugin], - createWriteContext('Ubuntu'), - { - fs: memoryFs, - spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, - platform: 'win32', - effectiveHomeDir: hostHomeDir - }, - createPredeclaredOutputs(plugin, [createGlobalOutputDeclaration(sourcePath)]) - ) - - expect(result).toEqual({ - mirroredFiles: 0, - warnings: [], - errors: [] - }) - }) - - it('unions generated globals with declared mirror files and dedupes by source path', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = 'C:\\Users\\alpha' - const configPath = path.win32.join(hostHomeDir, '.codex', 'config.toml') - const authPath = path.win32.join(hostHomeDir, '.codex', 'auth.json') - const promptPath = path.win32.join(hostHomeDir, '.codex', 'AGENTS.md') - const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' - const plugin = createMirrorPlugin(['~/.codex/config.toml', '~/.codex/auth.json']) - - memoryFs.seedFile(configPath, 'codex = true\n') - memoryFs.seedFile(authPath, '{"token":"abc"}\n') - memoryFs.seedFile(promptPath, 'global prompt\n') - memoryFs.seedDirectory(targetHomeDir) - - const result = await syncWindowsConfigIntoWsl( - [plugin], - createWriteContext('Ubuntu'), - { - fs: memoryFs, - spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, - platform: 'win32', - effectiveHomeDir: hostHomeDir - }, - createPredeclaredOutputs(plugin, [ - createGlobalOutputDeclaration(configPath), - createGlobalOutputDeclaration(promptPath) - ]) - ) - - expect(result.errors).toEqual([]) - expect(result.warnings).toEqual([]) - expect(result.mirroredFiles).toBe(3) - expect(memoryFs.readFileSync(path.win32.join(targetHomeDir, '.codex', 'config.toml')).toString('utf8')).toBe('codex = true\n') - expect(memoryFs.readFileSync(path.win32.join(targetHomeDir, '.codex', 'auth.json')).toString('utf8')).toBe('{"token":"abc"}\n') - expect(memoryFs.readFileSync(path.win32.join(targetHomeDir, '.codex', 'AGENTS.md')).toString('utf8')).toBe('global prompt\n') - }) - - it('auto-discovers WSL instances when none are configured', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = 'C:\\Users\\alpha' - const sourcePath = path.win32.join(hostHomeDir, '.codex', 'config.toml') - const spawnSyncMock = createWslSpawnSyncMock({ - Ubuntu: '/home/alpha', - Debian: '/home/beta' - }, ['Ubuntu', 'Debian']) - - memoryFs.seedFile(sourcePath, 'codex = true\n') - memoryFs.seedDirectory('\\\\wsl$\\Ubuntu\\home\\alpha') - memoryFs.seedDirectory('\\\\wsl$\\Debian\\home\\beta') - - const result = await syncWindowsConfigIntoWsl( - [createMirrorPlugin('~/.codex/config.toml')], - createWriteContext(), - { - fs: memoryFs, - spawnSync: spawnSyncMock as never, - platform: 'win32', - effectiveHomeDir: hostHomeDir - } - ) - - expect(result.errors).toEqual([]) - expect(result.warnings).toEqual([]) - expect(result.mirroredFiles).toBe(2) - expect(wasWslListCalled(spawnSyncMock)).toBe(true) - }) - - it('prefers configured WSL instances over auto-discovery', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = 'C:\\Users\\alpha' - const sourcePath = path.win32.join(hostHomeDir, '.codex', 'config.toml') - const spawnSyncMock = createWslSpawnSyncMock({ - Ubuntu: '/home/alpha', - Debian: '/home/beta' - }, ['Ubuntu', 'Debian']) - - memoryFs.seedFile(sourcePath, 'codex = true\n') - memoryFs.seedDirectory('\\\\wsl$\\Ubuntu\\home\\alpha') - - const result = await syncWindowsConfigIntoWsl( - [createMirrorPlugin('~/.codex/config.toml')], - createWriteContext('Ubuntu'), - { - fs: memoryFs, - spawnSync: spawnSyncMock as never, - platform: 'win32', - effectiveHomeDir: hostHomeDir - } - ) - - expect(result.errors).toEqual([]) - expect(result.warnings).toEqual([]) - expect(result.mirroredFiles).toBe(1) - expect(wasWslListCalled(spawnSyncMock)).toBe(false) - }) - - it('warns and skips when a declared host config file does not exist', async () => { - const memoryFs = new MemoryMirrorFs() - memoryFs.seedDirectory('\\\\wsl$\\Ubuntu\\home\\alpha') - - const result = await syncWindowsConfigIntoWsl( - [createMirrorPlugin('~/.claude/settings.json')], - createWriteContext('Ubuntu'), - { - fs: memoryFs, - spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, - platform: 'win32', - effectiveHomeDir: 'C:\\Users\\alpha' - } - ) - - expect(result.errors).toEqual([]) - expect(result.warnings).toEqual([ - 'Skipping missing WSL mirror source file: C:\\Users\\alpha\\.claude\\settings.json' - ]) - expect(result.mirroredFiles).toBe(0) - }) - - it('validates WSL instance probing before writing any mirrored files', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = 'C:\\Users\\alpha' - memoryFs.seedFile(path.win32.join(hostHomeDir, '.codex', 'auth.json'), '{"ok":true}\n') - - const result = await syncWindowsConfigIntoWsl( - [createMirrorPlugin('~/.codex/auth.json')], - createWriteContext('BrokenUbuntu'), - { - fs: memoryFs, - spawnSync: vi.fn(() => ({ - status: 1, - stdout: Buffer.alloc(0), - stderr: Buffer.from('distribution not found', 'utf8') - })) as never, - platform: 'win32', - effectiveHomeDir: hostHomeDir - } - ) - - expect(result.mirroredFiles).toBe(0) - expect(result.warnings).toEqual([]) - expect(result.errors).toEqual([ - 'Failed to probe WSL instance "BrokenUbuntu". distribution not found' - ]) - }) - - it('counts dry-run mirror operations without writing explicit mirror files', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = 'C:\\Users\\alpha' - const sourcePath = path.win32.join(hostHomeDir, '.claude', 'config.json') - const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' - const targetPath = path.win32.join(targetHomeDir, '.claude', 'config.json') - - memoryFs.seedFile(sourcePath, '{"theme":"dark"}\n') - memoryFs.seedDirectory(targetHomeDir) - - const result = await syncWindowsConfigIntoWsl( - [createMirrorPlugin('~/.claude/config.json')], - createWriteContext('Ubuntu', true), - { - fs: memoryFs, - spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, - platform: 'win32', - effectiveHomeDir: hostHomeDir - } - ) - - expect(result.errors).toEqual([]) - expect(result.mirroredFiles).toBe(1) - expect(memoryFs.existsSync(targetPath)).toBe(false) - }) - - it('counts generated outputs during dry-run even before the host file exists', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = 'C:\\Users\\alpha' - const sourcePath = path.win32.join(hostHomeDir, '.codex', 'AGENTS.md') - const targetHomeDir = '\\\\wsl$\\Ubuntu\\home\\alpha' - const targetPath = path.win32.join(targetHomeDir, '.codex', 'AGENTS.md') - const plugin = createMirrorPlugin() - - memoryFs.seedDirectory(targetHomeDir) - - const result = await syncWindowsConfigIntoWsl( - [plugin], - createWriteContext('Ubuntu', true), - { - fs: memoryFs, - spawnSync: createWslSpawnSyncMock({Ubuntu: '/home/alpha'}) as never, - platform: 'win32', - effectiveHomeDir: hostHomeDir - }, - createPredeclaredOutputs(plugin, [createGlobalOutputDeclaration(sourcePath)]) - ) - - expect(result.errors).toEqual([]) - expect(result.warnings).toEqual([]) - expect(result.mirroredFiles).toBe(1) - expect(memoryFs.existsSync(targetPath)).toBe(false) - }) - - it('logs info and skips mirror sync when WSL is unavailable on the host', async () => { - const memoryFs = new MemoryMirrorFs() - const logger = createLogger() - - const result = await syncWindowsConfigIntoWsl( - [createMirrorPlugin('~/.codex/config.toml')], - { - ...createWriteContext('Ubuntu'), - logger - }, - { - fs: memoryFs, - spawnSync: vi.fn(() => ({ - status: null, - stdout: Buffer.alloc(0), - stderr: Buffer.alloc(0), - error: Object.assign(new Error('spawnSync wsl.exe ENOENT'), {code: 'ENOENT'}) - })) as never, - platform: 'win32', - effectiveHomeDir: 'C:\\Users\\alpha' - } - ) - - expect(result).toEqual({ - mirroredFiles: 0, - warnings: [], - errors: [] - }) - expect(logger.infoMessages).toContain('wsl is unavailable, skipping WSL mirror sync') - }) - - it('mirrors declared host config files back into the current WSL home when running inside WSL', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = '/mnt/c/Users/alpha' - const nativeHomeDir = '/home/alpha' - const sourcePath = path.posix.join(hostHomeDir, '.codex', 'config.toml') - const targetPath = path.posix.join(nativeHomeDir, '.codex', 'config.toml') - - memoryFs.seedFile(sourcePath, 'codex = true\n') - memoryFs.seedDirectory(nativeHomeDir) - - const result = await syncWindowsConfigIntoWsl( - [createMirrorPlugin('~/.codex/config.toml')], - createWriteContext(), - { - fs: memoryFs, - platform: 'linux', - isWsl: true, - effectiveHomeDir: hostHomeDir, - nativeHomeDir - } - ) - - expect(result.errors).toEqual([]) - expect(result.warnings).toEqual([]) - expect(result.mirroredFiles).toBe(1) - expect(memoryFs.readFileSync(targetPath).toString('utf8')).toBe('codex = true\n') - }) - - it('mirrors generated global outputs back into the current WSL home when running inside WSL', async () => { - const memoryFs = new MemoryMirrorFs() - const hostHomeDir = '/mnt/c/Users/alpha' - const nativeHomeDir = '/home/alpha' - const sourcePath = path.posix.join(hostHomeDir, '.codex', 'AGENTS.md') - const targetPath = path.posix.join(nativeHomeDir, '.codex', 'AGENTS.md') - const plugin = createMirrorPlugin() - - memoryFs.seedFile(sourcePath, 'global prompt\n') - memoryFs.seedDirectory(nativeHomeDir) - - const result = await syncWindowsConfigIntoWsl( - [plugin], - createWriteContext(), - { - fs: memoryFs, - platform: 'linux', - isWsl: true, - effectiveHomeDir: hostHomeDir, - nativeHomeDir - }, - createPredeclaredOutputs(plugin, [createGlobalOutputDeclaration(sourcePath)]) - ) - - expect(result.errors).toEqual([]) - expect(result.warnings).toEqual([]) - expect(result.mirroredFiles).toBe(1) - expect(memoryFs.readFileSync(targetPath).toString('utf8')).toBe('global prompt\n') - }) -}) diff --git a/cli/src/wsl-mirror-sync.ts b/cli/src/wsl-mirror-sync.ts deleted file mode 100644 index 04d4cffc..00000000 --- a/cli/src/wsl-mirror-sync.ts +++ /dev/null @@ -1,656 +0,0 @@ -import type { - ILogger, - OutputFileDeclaration, - OutputPlugin, - OutputWriteContext, - PluginOptions, - WslMirrorFileDeclaration -} from './plugins/plugin-core' -import type {RuntimeEnvironmentContext} from './runtime-environment' -import {Buffer} from 'node:buffer' -import {spawnSync} from 'node:child_process' -import * as fs from 'node:fs' -import * as path from 'node:path' -import process from 'node:process' -import {getEffectiveHomeDir, resolveRuntimeEnvironment, resolveUserPath} from './runtime-environment' - -type MirrorFs = Pick -type SpawnSyncFn = typeof spawnSync -type SpawnSyncResult = ReturnType - -export interface WslMirrorRuntimeDependencies { - readonly fs?: MirrorFs - readonly spawnSync?: SpawnSyncFn - readonly platform?: NodeJS.Platform - readonly effectiveHomeDir?: string - readonly nativeHomeDir?: string - readonly isWsl?: boolean -} - -export interface ResolvedWslInstanceTarget { - readonly instance: string - readonly linuxHomeDir: string - readonly windowsHomeDir: string -} - -export interface WslMirrorSyncResult { - readonly mirroredFiles: number - readonly warnings: readonly string[] - readonly errors: readonly string[] -} - -class WslUnavailableError extends Error {} - -interface ResolvedWslMirrorSource { - readonly kind: 'declared' | 'generated' - readonly sourcePath: string - readonly relativePathSegments: readonly string[] -} - -function getFs(dependencies?: WslMirrorRuntimeDependencies): MirrorFs { - return dependencies?.fs ?? fs -} - -function getSpawnSync(dependencies?: WslMirrorRuntimeDependencies): SpawnSyncFn { - return dependencies?.spawnSync ?? spawnSync -} - -function getPlatform(dependencies?: WslMirrorRuntimeDependencies): NodeJS.Platform { - return dependencies?.platform ?? process.platform -} - -function getHostHomeDir(dependencies?: WslMirrorRuntimeDependencies): string { - return dependencies?.effectiveHomeDir ?? getEffectiveHomeDir() -} - -function getNativeHomeDir(dependencies?: WslMirrorRuntimeDependencies): string { - return dependencies?.nativeHomeDir ?? resolveRuntimeEnvironment().nativeHomeDir -} - -function isWslExecutionRuntime(dependencies?: WslMirrorRuntimeDependencies): boolean { - return dependencies?.isWsl ?? resolveRuntimeEnvironment().isWsl -} - -function getPathModuleForPlatform( - platform: NodeJS.Platform -): typeof path.win32 | typeof path.posix { - return platform === 'win32' ? path.win32 : path.posix -} - -function normalizeInstanceNames( - instances: readonly string[] -): string[] { - return [...new Set(instances.map(instance => instance.trim()).filter(instance => instance.length > 0))] -} - -function normalizeConfiguredInstances( - pluginOptions?: PluginOptions -): string[] { - const configuredInstances = pluginOptions?.windows?.wsl2?.instances - const instanceList = configuredInstances == null - ? [] - : Array.isArray(configuredInstances) - ? configuredInstances - : [configuredInstances] - - return normalizeInstanceNames(instanceList) -} - -function buildWindowsWslHomePath( - instance: string, - linuxHomeDir: string -): string { - if (!linuxHomeDir.startsWith('/')) { - throw new Error(`WSL instance "${instance}" returned a non-absolute home path: "${linuxHomeDir}".`) - } - - const pathSegments = linuxHomeDir.split('/').filter(segment => segment.length > 0) - return path.win32.join(`\\\\wsl$\\${instance}`, ...pathSegments) -} - -function resolveMirroredRelativePathSegments( - sourcePath: string, - hostHomeDir: string, - platform: NodeJS.Platform -): string[] { - const pathModule = getPathModuleForPlatform(platform) - const normalizedHostHome = pathModule.normalize(hostHomeDir) - const normalizedSourcePath = pathModule.normalize(sourcePath) - const relativePath = pathModule.relative(normalizedHostHome, normalizedSourcePath) - - if ( - relativePath.length === 0 - || relativePath.startsWith('..') - || pathModule.isAbsolute(relativePath) - ) { - throw new Error( - `WSL mirror source "${sourcePath}" must stay under the host home directory "${hostHomeDir}".` - ) - } - - return relativePath.split(/[\\/]+/u).filter(segment => segment.length > 0) -} - -function decodeWslCliOutput( - value: unknown -): string { - if (typeof value === 'string') return value - if (!Buffer.isBuffer(value) || value.length === 0) return '' - - const hasUtf16LeBom = value.length >= 2 && value[0] === 0xFF && value[1] === 0xFE - const hasUtf16BeBom = value.length >= 2 && value[0] === 0xFE && value[1] === 0xFF - if (hasUtf16LeBom || hasUtf16BeBom) return value.toString('utf16le').replace(/^\uFEFF/u, '') - - const utf8Text = value.toString('utf8') - if (utf8Text.includes('\u0000')) return value.toString('utf16le').replace(/^\uFEFF/u, '') - return utf8Text -} - -function getSpawnOutputText( - value: unknown -): string { - return decodeWslCliOutput(value).replaceAll('\u0000', '') -} - -function getSpawnSyncErrorCode(result: SpawnSyncResult): string | undefined { - const {error} = result - if (error == null || typeof error !== 'object') return void 0 - return 'code' in error && typeof error.code === 'string' ? error.code : void 0 -} - -function getWslUnavailableReason(result: SpawnSyncResult): string | undefined { - const errorCode = getSpawnSyncErrorCode(result) - if (errorCode === 'ENOENT') return 'wsl.exe is not available on PATH.' - - const combinedOutput = [result.stderr, result.stdout] - .map(value => getSpawnOutputText(value).trim()) - .filter(value => value.length > 0) - .join('\n') - .toLowerCase() - - if (combinedOutput.length === 0) return void 0 - - const unavailableMarkers = [ - 'windows subsystem for linux has no installed distributions', - 'windows subsystem for linux has not been enabled', - 'the windows subsystem for linux optional component is not enabled', - 'wsl is not installed', - 'run \'wsl.exe --install\'', - 'run "wsl.exe --install"', - 'wslregisterdistribution failed with error: 0x8007019e' - ] - - return unavailableMarkers.some(marker => combinedOutput.includes(marker)) - ? combinedOutput - : void 0 -} - -export async function collectDeclaredWslMirrorFiles( - outputPlugins: readonly OutputPlugin[], - ctx: OutputWriteContext -): Promise { - const declarations = await Promise.all(outputPlugins.map(async plugin => { - if (plugin.declareWslMirrorFiles == null) return [] - return plugin.declareWslMirrorFiles(ctx) - })) - - const dedupedDeclarations = new Map() - for (const group of declarations) { - for (const declaration of group) { - dedupedDeclarations.set(declaration.sourcePath, declaration) - } - } - - return [...dedupedDeclarations.values()] -} - -function buildWindowsMirrorPathRuntimeContext( - hostHomeDir: string -): RuntimeEnvironmentContext { - return { - platform: 'win32', - isWsl: false, - nativeHomeDir: hostHomeDir, - effectiveHomeDir: hostHomeDir, - globalConfigCandidates: [], - windowsUsersRoot: '', - expandedEnv: { - HOME: hostHomeDir, - USERPROFILE: hostHomeDir - } - } -} - -function buildWslHostMirrorPathRuntimeContext( - hostHomeDir: string, - nativeHomeDir: string -): RuntimeEnvironmentContext { - return { - platform: 'linux', - isWsl: true, - nativeHomeDir, - effectiveHomeDir: hostHomeDir, - globalConfigCandidates: [], - windowsUsersRoot: '', - expandedEnv: { - HOME: hostHomeDir, - USERPROFILE: hostHomeDir - } - } -} - -function parseWslInstanceList( - rawOutput: string -): string[] { - const instanceList = rawOutput - .split(/\r?\n/u) - .map(line => line.replace(/^\*/u, '').trim()) - .filter(line => line.length > 0) - - return normalizeInstanceNames(instanceList) -} - -function discoverWslInstances( - logger: ILogger, - dependencies?: WslMirrorRuntimeDependencies -): string[] { - const spawnSyncImpl = getSpawnSync(dependencies) - const listResult = spawnSyncImpl('wsl.exe', ['--list', '--quiet'], { - shell: false, - windowsHide: true - }) - - const unavailableReason = getWslUnavailableReason(listResult) - if (unavailableReason != null) throw new WslUnavailableError(unavailableReason) - - if (listResult.status !== 0) { - const stderr = getSpawnOutputText(listResult.stderr).trim() - throw new Error( - `Failed to enumerate WSL instances. ${stderr.length > 0 ? stderr : 'wsl.exe returned a non-zero exit status.'}` - ) - } - - const discoveredInstances = parseWslInstanceList(getSpawnOutputText(listResult.stdout)) - logger.info('discovered wsl instances', { - instances: discoveredInstances - }) - return discoveredInstances -} - -function resolveConfiguredOrDiscoveredInstances( - pluginOptions: Required, - logger: ILogger, - dependencies?: WslMirrorRuntimeDependencies -): string[] { - const configuredInstances = normalizeConfiguredInstances(pluginOptions) - if (configuredInstances.length > 0) return configuredInstances - return discoverWslInstances(logger, dependencies) -} - -function resolveGeneratedWslMirrorSource( - declaration: OutputFileDeclaration, - hostHomeDir: string, - platform: NodeJS.Platform -): ResolvedWslMirrorSource | undefined { - if (declaration.scope !== 'global') return void 0 - - const pathModule = getPathModuleForPlatform(platform) - const sourcePath = pathModule.normalize(declaration.path) - let relativePathSegments: string[] - try { - relativePathSegments = resolveMirroredRelativePathSegments(sourcePath, hostHomeDir, platform) - } - catch { - return void 0 - } - - const [topLevelSegment] = relativePathSegments - - // Mirror home-style tool config roots only. Windows app-data trees such as - // AppData\Local\JetBrains\... stay Windows-only even though they live under the user profile. - if (!topLevelSegment?.startsWith('.')) return void 0 - - return { - kind: 'generated', - sourcePath, - relativePathSegments - } -} - -function collectGeneratedWslMirrorSources( - predeclaredOutputs: ReadonlyMap | undefined, - hostHomeDir: string, - platform: NodeJS.Platform -): readonly ResolvedWslMirrorSource[] { - if (predeclaredOutputs == null) return [] - - const dedupedSources = new Map() - for (const declarations of predeclaredOutputs.values()) { - for (const declaration of declarations) { - const resolvedSource = resolveGeneratedWslMirrorSource(declaration, hostHomeDir, platform) - if (resolvedSource == null) continue - dedupedSources.set(resolvedSource.sourcePath, resolvedSource) - } - } - - return [...dedupedSources.values()] -} - -function resolveDeclaredWslMirrorSource( - declaration: WslMirrorFileDeclaration, - pathRuntimeContext: RuntimeEnvironmentContext, - hostHomeDir: string, - platform: NodeJS.Platform -): ResolvedWslMirrorSource { - const pathModule = getPathModuleForPlatform(platform) - const sourcePath = pathModule.normalize(resolveUserPath(declaration.sourcePath, pathRuntimeContext)) - const relativePathSegments = resolveMirroredRelativePathSegments(sourcePath, hostHomeDir, platform) - - return { - kind: 'declared', - sourcePath, - relativePathSegments - } -} - -function combineWslMirrorSources( - mirrorDeclarations: readonly WslMirrorFileDeclaration[], - generatedMirrorSources: readonly ResolvedWslMirrorSource[], - pathRuntimeContext: RuntimeEnvironmentContext, - hostHomeDir: string, - platform: NodeJS.Platform -): {readonly sources: readonly ResolvedWslMirrorSource[], readonly errors: readonly string[]} { - const dedupedSources = new Map() - const errors: string[] = [] - - for (const declaration of mirrorDeclarations) { - try { - const resolvedSource = resolveDeclaredWslMirrorSource(declaration, pathRuntimeContext, hostHomeDir, platform) - dedupedSources.set(resolvedSource.sourcePath, resolvedSource) - } - catch (error) { - errors.push(error instanceof Error ? error.message : String(error)) - } - } - - for (const source of generatedMirrorSources) { - dedupedSources.set(source.sourcePath, source) - } - - return { - sources: [...dedupedSources.values()], - errors - } -} - -export function resolveWslInstanceTargets( - pluginOptions: Required, - logger: ILogger, - dependencies?: WslMirrorRuntimeDependencies -): ResolvedWslInstanceTarget[] { - if (getPlatform(dependencies) !== 'win32') return [] - - const configuredInstances = resolveConfiguredOrDiscoveredInstances(pluginOptions, logger, dependencies) - if (configuredInstances.length === 0) return [] - - const fsImpl = getFs(dependencies) - const spawnSyncImpl = getSpawnSync(dependencies) - const resolvedTargets: ResolvedWslInstanceTarget[] = [] - - for (const instance of configuredInstances) { - const probeResult = spawnSyncImpl('wsl.exe', ['-d', instance, 'sh', '-lc', 'printf %s "$HOME"'], { - shell: false, - windowsHide: true - }) - - const unavailableReason = getWslUnavailableReason(probeResult) - if (unavailableReason != null) throw new WslUnavailableError(unavailableReason) - - if (probeResult.status !== 0) { - const stderr = getSpawnOutputText(probeResult.stderr).trim() - throw new Error( - `Failed to probe WSL instance "${instance}". ${stderr.length > 0 ? stderr : 'wsl.exe returned a non-zero exit status.'}` - ) - } - - const linuxHomeDir = getSpawnOutputText(probeResult.stdout).trim() - if (linuxHomeDir.length === 0) throw new Error(`WSL instance "${instance}" returned an empty home directory.`) - - const windowsHomeDir = buildWindowsWslHomePath(instance, linuxHomeDir) - if (!fsImpl.existsSync(windowsHomeDir)) { - throw new Error( - `WSL instance "${instance}" home directory is unavailable at "${windowsHomeDir}".` - ) - } - - logger.info('resolved wsl instance home', { - instance, - linuxHomeDir, - windowsHomeDir - }) - - resolvedTargets.push({ - instance, - linuxHomeDir, - windowsHomeDir - }) - } - - return resolvedTargets -} - -function syncResolvedMirrorSourcesIntoCurrentWslHome( - sources: readonly ResolvedWslMirrorSource[], - ctx: OutputWriteContext, - dependencies?: WslMirrorRuntimeDependencies -): WslMirrorSyncResult { - const fsImpl = getFs(dependencies) - const nativeHomeDir = path.posix.normalize(getNativeHomeDir(dependencies)) - let mirroredFiles = 0 - const warnings: string[] = [] - const errors: string[] = [] - - for (const source of sources) { - if (source.kind === 'declared' && !fsImpl.existsSync(source.sourcePath)) { - const warningMessage = `Skipping missing WSL mirror source file: ${source.sourcePath}` - warnings.push(warningMessage) - ctx.logger.warn({ - code: 'WSL_MIRROR_SOURCE_MISSING', - title: 'WSL mirror source file is missing', - rootCause: [warningMessage], - exactFix: [ - 'Create the source file on the Windows host or remove the WSL mirror declaration before retrying tnmsc.' - ] - }) - continue - } - - const targetPath = path.posix.join(nativeHomeDir, ...source.relativePathSegments) - try { - if (ctx.dryRun === true) { - ctx.logger.info('would mirror host config into wsl runtime home', { - sourcePath: source.sourcePath, - targetPath, - dryRun: true - }) - } else { - const content = fsImpl.readFileSync(source.sourcePath) - fsImpl.mkdirSync(path.posix.dirname(targetPath), {recursive: true}) - fsImpl.writeFileSync(targetPath, content) - ctx.logger.info('mirrored host config into wsl runtime home', { - sourcePath: source.sourcePath, - targetPath - }) - } - - mirroredFiles += 1 - } - catch (error) { - errors.push( - `Failed to mirror "${source.sourcePath}" into the current WSL home at "${targetPath}": ${error instanceof Error ? error.message : String(error)}` - ) - } - } - - return { - mirroredFiles, - warnings, - errors - } -} - -export async function syncWindowsConfigIntoWsl( - outputPlugins: readonly OutputPlugin[], - ctx: OutputWriteContext, - dependencies?: WslMirrorRuntimeDependencies, - predeclaredOutputs?: ReadonlyMap -): Promise { - const platform = getPlatform(dependencies) - const wslRuntime = platform === 'linux' && isWslExecutionRuntime(dependencies) - if (platform !== 'win32' && !wslRuntime) { - return { - mirroredFiles: 0, - warnings: [], - errors: [] - } - } - - const hostHomeDir = wslRuntime - ? path.posix.normalize(getHostHomeDir(dependencies)) - : path.win32.normalize(getHostHomeDir(dependencies)) - const mirrorDeclarations = await collectDeclaredWslMirrorFiles(outputPlugins, ctx) - const generatedMirrorSources = collectGeneratedWslMirrorSources(predeclaredOutputs, hostHomeDir, platform) - if (mirrorDeclarations.length === 0 && generatedMirrorSources.length === 0) { - return { - mirroredFiles: 0, - warnings: [], - errors: [] - } - } - - const pluginOptions = (ctx.pluginOptions ?? {}) as Required - const nativeHomeDir = wslRuntime ? path.posix.normalize(getNativeHomeDir(dependencies)) : void 0 - const pathRuntimeContext = wslRuntime - ? buildWslHostMirrorPathRuntimeContext(hostHomeDir, nativeHomeDir ?? hostHomeDir) - : buildWindowsMirrorPathRuntimeContext(hostHomeDir) - const resolvedMirrorSources = combineWslMirrorSources( - mirrorDeclarations, - generatedMirrorSources, - pathRuntimeContext, - hostHomeDir, - platform - ) - - if (wslRuntime) { - if (resolvedMirrorSources.sources.length === 0 || nativeHomeDir == null || hostHomeDir === nativeHomeDir) { - return { - mirroredFiles: 0, - warnings: [], - errors: [...resolvedMirrorSources.errors] - } - } - - const localMirrorResult = syncResolvedMirrorSourcesIntoCurrentWslHome( - resolvedMirrorSources.sources, - ctx, - dependencies - ) - - return { - mirroredFiles: localMirrorResult.mirroredFiles, - warnings: [...localMirrorResult.warnings], - errors: [...resolvedMirrorSources.errors, ...localMirrorResult.errors] - } - } - - let resolvedTargets: ResolvedWslInstanceTarget[] - try { - resolvedTargets = resolveWslInstanceTargets(pluginOptions, ctx.logger, dependencies) - } - catch (error) { - if (error instanceof WslUnavailableError) { - ctx.logger.info('wsl is unavailable, skipping WSL mirror sync', { - reason: error.message - }) - return { - mirroredFiles: 0, - warnings: [], - errors: [] - } - } - - return { - mirroredFiles: 0, - warnings: [], - errors: [error instanceof Error ? error.message : String(error)] - } - } - - if (resolvedTargets.length === 0 || resolvedMirrorSources.sources.length === 0) { - return { - mirroredFiles: 0, - warnings: [], - errors: [...resolvedMirrorSources.errors] - } - } - - const fsImpl = getFs(dependencies) - let mirroredFiles = 0 - const warnings: string[] = [] - const errors: string[] = [...resolvedMirrorSources.errors] - - for (const declaration of resolvedMirrorSources.sources) { - if (declaration.kind === 'declared' && !fsImpl.existsSync(declaration.sourcePath)) { - const warningMessage = `Skipping missing WSL mirror source file: ${declaration.sourcePath}` - warnings.push(warningMessage) - ctx.logger.warn({ - code: 'WSL_MIRROR_SOURCE_MISSING', - title: 'WSL mirror source file is missing', - rootCause: [warningMessage], - exactFix: [ - 'Create the source file on the Windows host or remove the WSL mirror declaration before retrying tnmsc.' - ] - }) - continue - } - - const {relativePathSegments, sourcePath} = declaration - - for (const resolvedTarget of resolvedTargets) { - const targetPath = path.win32.join(resolvedTarget.windowsHomeDir, ...relativePathSegments) - - try { - if (ctx.dryRun === true) { - ctx.logger.info('would mirror windows config into wsl', { - instance: resolvedTarget.instance, - sourcePath, - targetPath, - dryRun: true - }) - } else { - const content = fsImpl.readFileSync(sourcePath) - fsImpl.mkdirSync(path.win32.dirname(targetPath), {recursive: true}) - fsImpl.writeFileSync(targetPath, content) - ctx.logger.info('mirrored windows config into wsl', { - instance: resolvedTarget.instance, - sourcePath, - targetPath - }) - } - - mirroredFiles += 1 - } - catch (error) { - errors.push( - `Failed to mirror "${sourcePath}" into WSL instance "${resolvedTarget.instance}" at "${targetPath}": ${error instanceof Error ? error.message : String(error)}` - ) - } - } - } - - return { - mirroredFiles, - warnings, - errors - } -} From b275aedc40214eab64d7cd0a3fde9fbec117ff5c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Tue, 31 Mar 2026 12:32:30 +0800 Subject: [PATCH 14/27] feat(cli): add config show command and related factories - Implemented `config_show` command to display configuration in JSON format. - Created `ConfigCommandFactory` to handle configuration commands, including showing and setting options. - Added factories for various commands: `CleanCommandFactory`, `DryRunCommandFactory`, `ExecuteCommandFactory`, `HelpCommandFactory`, `InitCommandFactory`, `PluginsCommandFactory`, `UnknownCommandFactory`, and `VersionCommandFactory`. - Introduced `help` command to provide usage instructions for the CLI. - Developed argument parser to handle command-line arguments and subcommands. - Added tests for CLI argument parsing functionality. - Implemented plugin runtime for executing commands and managing output. - Established cleanup functionality with native bindings for file and directory management. --- Cargo.lock | 1 + README.md | 107 +-- SECURITY.md | 19 +- cli/Cargo.toml | 1 + cli/package.json | 1 + cli/scripts/ensure-sdk-build.ts | 42 +- cli/scripts/sync-sdk-dist.ts | 39 +- cli/src/PluginPipeline.ts | 84 ++ {sdk => cli}/src/cli-runtime.test.ts | 24 +- {sdk => cli}/src/cli-runtime.ts | 68 +- cli/src/cli.rs | 37 + cli/src/commands/CleanCommand.ts | 17 + {sdk => cli}/src/commands/Command.ts | 33 +- cli/src/commands/CommandFactory.ts | 17 + {sdk => cli}/src/commands/CommandRegistry.ts | 16 +- cli/src/commands/ConfigCommand.ts | 150 ++++ cli/src/commands/ConfigShowCommand.ts | 21 + .../src/commands/DryRunCleanCommand.ts | 29 +- .../src/commands/DryRunOutputCommand.ts | 26 +- {sdk => cli}/src/commands/ExecuteCommand.ts | 48 +- {sdk => cli}/src/commands/HelpCommand.ts | 13 +- cli/src/commands/InitCommand.test.ts | 76 ++ cli/src/commands/InitCommand.ts | 24 + cli/src/commands/JsonOutputCommand.ts | 31 + cli/src/commands/PluginsCommand.ts | 35 + cli/src/commands/UnknownCommand.ts | 23 + {sdk => cli}/src/commands/VersionCommand.ts | 15 +- cli/src/commands/bridge.rs | 21 + cli/src/commands/config_cmd.rs | 33 + cli/src/commands/config_show.rs | 15 + .../commands/factories/CleanCommandFactory.ts | 7 +- .../factories/ConfigCommandFactory.ts | 11 +- .../factories/DryRunCommandFactory.ts | 7 +- .../factories/ExecuteCommandFactory.ts | 13 + .../commands/factories/HelpCommandFactory.ts | 7 +- .../commands/factories/InitCommandFactory.ts | 3 +- .../factories/PluginsCommandFactory.ts | 7 +- .../factories/UnknownCommandFactory.ts | 7 +- .../factories/VersionCommandFactory.ts | 7 +- {sdk => cli}/src/commands/help.rs | 0 {sdk => cli}/src/commands/mod.rs | 0 {sdk => cli}/src/commands/version.rs | 2 +- cli/src/index.test.ts | 12 +- cli/src/index.ts | 7 +- cli/src/main.rs | 19 +- .../src/pipeline/CliArgumentParser.test.ts | 0 .../src/pipeline/CliArgumentParser.ts | 160 ++-- {sdk => cli}/src/plugin-runtime.ts | 108 ++- cli/src/plugin.config.ts | 70 ++ {sdk => cli}/src/script-runtime-worker.ts | 7 +- cli/tsconfig.json | 4 + cli/tsdown.config.ts | 30 +- cli/vite.config.ts | 9 +- sdk/build.rs | 12 +- sdk/package.json | 6 +- sdk/src/PluginPipeline.test.ts | 60 -- sdk/src/PluginPipeline.ts | 101 --- sdk/src/bridge/node.rs | 16 +- sdk/src/commands/CleanCommand.ts | 34 - sdk/src/commands/CleanupUtils.adapter.test.ts | 156 ---- sdk/src/commands/CleanupUtils.test.ts | 782 ------------------ sdk/src/commands/CommandFactory.ts | 29 - sdk/src/commands/ConfigCommand.ts | 237 ------ sdk/src/commands/ConfigShowCommand.ts | 48 -- sdk/src/commands/InitCommand.test.ts | 78 -- sdk/src/commands/InitCommand.ts | 36 - sdk/src/commands/JsonOutputCommand.ts | 56 -- sdk/src/commands/PluginsCommand.ts | 54 -- .../ProtectedDeletionCommands.test.ts | 277 ------- sdk/src/commands/UnknownCommand.ts | 34 - sdk/src/commands/bridge.rs | 23 - sdk/src/commands/config_cmd.rs | 108 --- sdk/src/commands/config_show.rs | 44 - .../factories/ExecuteCommandFactory.ts | 20 - sdk/src/config.plugins-fast-path.test.ts | 5 +- sdk/src/config.test.ts | 66 +- sdk/src/config.ts | 112 +-- sdk/src/index.test.ts | 4 +- sdk/src/index.ts | 42 +- sdk/src/inputs/runtime.ts | 48 +- sdk/src/lib.rs | 28 +- sdk/src/plugin.config.ts | 58 -- sdk/src/plugins/plugin-core/plugin.ts | 45 +- sdk/src/runtime-command.ts | 1 + sdk/src/runtime/cleanup.ts | 461 +++++++++++ sdk/tsdown.config.ts | 48 +- 86 files changed, 1632 insertions(+), 3060 deletions(-) create mode 100644 cli/src/PluginPipeline.ts rename {sdk => cli}/src/cli-runtime.test.ts (86%) rename {sdk => cli}/src/cli-runtime.ts (67%) create mode 100644 cli/src/commands/CleanCommand.ts rename {sdk => cli}/src/commands/Command.ts (69%) create mode 100644 cli/src/commands/CommandFactory.ts rename {sdk => cli}/src/commands/CommandRegistry.ts (62%) create mode 100644 cli/src/commands/ConfigCommand.ts create mode 100644 cli/src/commands/ConfigShowCommand.ts rename {sdk => cli}/src/commands/DryRunCleanCommand.ts (62%) rename {sdk => cli}/src/commands/DryRunOutputCommand.ts (67%) rename {sdk => cli}/src/commands/ExecuteCommand.ts (55%) rename {sdk => cli}/src/commands/HelpCommand.ts (91%) create mode 100644 cli/src/commands/InitCommand.test.ts create mode 100644 cli/src/commands/InitCommand.ts create mode 100644 cli/src/commands/JsonOutputCommand.ts create mode 100644 cli/src/commands/PluginsCommand.ts create mode 100644 cli/src/commands/UnknownCommand.ts rename {sdk => cli}/src/commands/VersionCommand.ts (59%) create mode 100644 cli/src/commands/bridge.rs create mode 100644 cli/src/commands/config_cmd.rs create mode 100644 cli/src/commands/config_show.rs rename {sdk => cli}/src/commands/factories/CleanCommandFactory.ts (68%) rename {sdk => cli}/src/commands/factories/ConfigCommandFactory.ts (67%) rename {sdk => cli}/src/commands/factories/DryRunCommandFactory.ts (73%) create mode 100644 cli/src/commands/factories/ExecuteCommandFactory.ts rename {sdk => cli}/src/commands/factories/HelpCommandFactory.ts (77%) rename {sdk => cli}/src/commands/factories/InitCommandFactory.ts (86%) rename {sdk => cli}/src/commands/factories/PluginsCommandFactory.ts (73%) rename {sdk => cli}/src/commands/factories/UnknownCommandFactory.ts (72%) rename {sdk => cli}/src/commands/factories/VersionCommandFactory.ts (77%) rename {sdk => cli}/src/commands/help.rs (100%) rename {sdk => cli}/src/commands/mod.rs (100%) rename {sdk => cli}/src/commands/version.rs (64%) rename {sdk => cli}/src/pipeline/CliArgumentParser.test.ts (100%) rename {sdk => cli}/src/pipeline/CliArgumentParser.ts (60%) rename {sdk => cli}/src/plugin-runtime.ts (53%) create mode 100644 cli/src/plugin.config.ts rename {sdk => cli}/src/script-runtime-worker.ts (74%) delete mode 100644 sdk/src/PluginPipeline.test.ts delete mode 100644 sdk/src/PluginPipeline.ts delete mode 100644 sdk/src/commands/CleanCommand.ts delete mode 100644 sdk/src/commands/CleanupUtils.adapter.test.ts delete mode 100644 sdk/src/commands/CleanupUtils.test.ts delete mode 100644 sdk/src/commands/CommandFactory.ts delete mode 100644 sdk/src/commands/ConfigCommand.ts delete mode 100644 sdk/src/commands/ConfigShowCommand.ts delete mode 100644 sdk/src/commands/InitCommand.test.ts delete mode 100644 sdk/src/commands/InitCommand.ts delete mode 100644 sdk/src/commands/JsonOutputCommand.ts delete mode 100644 sdk/src/commands/PluginsCommand.ts delete mode 100644 sdk/src/commands/ProtectedDeletionCommands.test.ts delete mode 100644 sdk/src/commands/UnknownCommand.ts delete mode 100644 sdk/src/commands/bridge.rs delete mode 100644 sdk/src/commands/config_cmd.rs delete mode 100644 sdk/src/commands/config_show.rs delete mode 100644 sdk/src/commands/factories/ExecuteCommandFactory.ts delete mode 100644 sdk/src/plugin.config.ts create mode 100644 sdk/src/runtime-command.ts create mode 100644 sdk/src/runtime/cleanup.ts diff --git a/Cargo.lock b/Cargo.lock index 0eedf855..e6ccdde5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4461,6 +4461,7 @@ name = "tnmsc-cli-shell" version = "2026.10330.118" dependencies = [ "clap", + "serde_json", "tnmsc", "tnmsc-logger", ] diff --git a/README.md b/README.md index 93ba46f6..fcf40e8b 100644 --- a/README.md +++ b/README.md @@ -1,93 +1,96 @@ # memory-sync -Rats 🐀 are like this: even our own brains, even our memories, are things we haul around while running through this fucked-up world!!! +A rat is like this. Even its own brain, even its memory, has to be carried around in this rotten world!!! -I am a rat. No resources will ever be proactively provided to me. -So as a rat, I eat whatever I can reach: maggots in the sewer, leftovers in the slop bucket, and in extreme cases even my own kind—this is the survival mode in a world where resource allocation is brutally unfair. +I am a rat. No resource is going to walk up and offer itself to me. +So as a rat, I eat whatever I can reach: maggots in the sewer, leftovers in a slop bucket, and in extreme cases even my own kind. That is what survival looks like in a world where resource distribution is brutally unequal. `memory-sync` is the same kind of **tool-rat**: -- Does not expect any platform to grant an "official all-in-one solution" -- Does not rely on privileged interfaces of any single IDE / CLI -- Treats every readable config, prompt, and memory file as "edible matter" to be carried, dismantled, and recombined -In this ecosystem, giants monopolise the resources, and developers are thrown into the corner like rats. -`memory-sync` accepts this cruel reality, does not fantasise about fairness, and focuses on one thing only: **to chew up every fragment of resource you already have, and convert it into portable "memory" that can flow between any AI tool.** +- It does not wait for any platform to hand out an "official all-in-one solution" +- It does not rely on the privileged interface of any single IDE or CLI +- Any configuration, prompt, memory file, or generated artifact it can read becomes something edible: something to haul away, break apart, and recombine +In this ecosystem, the giants hoard the resources while developers get thrown into a corner like rats. +`memory-sync` accepts this cruel reality, does not fantasize about fairness, and focuses on one thing only: **to chew through every fragment of resource you already have and turn it into portable "memory" that can flow between AI tools.** ![rat](/.attachments/rat.svg) -What can it help you do? - -- **`.mdx` as the prompt source format**: write your prompts in MDX; `memory-sync` reads, transforms, and writes them into each tool's native config format—you maintain one source, it handles the rest. -- A **universal prompt spec**: write Global / Root / Child / Skill / Command / Agent prompts in a unified structure. -- **Auto-write tool config files**: AGENTS.md, .cursorrules, .kiro/, CLAUDE.md, etc.—if there is an entry point, it stuffs your memory in. -- **Generate copy-ready one-shot prompts**: package project context, tech stack, and current task into AI-friendly Markdown, paste into any chat box directly. -- Like a rat gnawing on cables, **gnaw structured memory out of existing directory structures and config files**, instead of asking you to rewrite everything from scratch. -- **Fine-grained control**: describe rules in YAML / JSON config files, choose what to sync by project, by Agent, by tool type—no "one-size-fits-all" overwrites. -- **Read-only source files**: never modifies your original repository directly, only reads and transforms, then materialises the result on the target tool side. -- **Full wipe**: on sync, erases all stale prompt traces in target tools—prompts are fully computable and auditable, leaving no residue for bad actors. -- **Prompts grow with you only**: memory follows you as a person, not the project. Someone else takes over the project—they cannot take your context. You move to a new project—your accumulated knowledge moves with you intact. +What can it do for you? + +- **Use `.mdx` / `.src.mdx` as the source of truth**: you maintain one source, and `memory-sync` turns it into native tool configs plus managed generated artifacts. +- **Use one unified input-asset model**: Global / Workspace / Project Memory, Skills, Commands, Sub-agents, Rules, README-like outputs, and related assets all fit into one structure. +- **Auto-write native tool configs**: AGENTS.md, Claude Code CLI, Codex CLI, Cursor, Windsurf, Qoder, Trae, Warp, JetBrains AI Assistant Codex, and more. If a native entry point exists, it can write there. +- **Manage derived artifacts**: besides target-tool configs, it can maintain English prompt outputs, skill exports, README-like outputs, and other helper configs. +- **Provide multiple entry points**: the public entry is the `tnmsc` CLI; internally there is also a private SDK, an MCP stdio server, and a Tauri GUI, all working around the same source-of-truth model. +- **Control write scope precisely**: use `outputScopes`, `cleanupProtection`, and related settings to constrain writes and cleanup by project, topic, and tool. +- **Keep source and derived outputs auditable**: source files, generated artifacts, and target-tool configs stay clearly separated. No hidden source edits. No hidden residue. +- **Let memory grow with you**: memory follows you as a person instead of leaking with the project. If a project changes hands, they do not get your context. If you move to another project, your accumulated memory goes with you unchanged. ## Install ```sh -npm install -g @truenine/memory-sync +npm install -g @truenine/memory-sync-cli ``` -## Docs +Optional MCP server: -`https://docs.truenine.org/tnmsc` +```sh +npm install -g @truenine/memory-sync-mcp +``` ## Supported Tools | Type | Tools | | --- | --- | -| IDE | Cursor, Kiro, Windsurf, JetBrains AI | -| CLI | Claude CLI, Gemini CLI, Codex CLI, Warp | +| IDE / Editor | Cursor, Windsurf, Qoder, Trae, Trae CN, JetBrains AI Assistant Codex, Zed, VS Code | +| CLI | Claude Code CLI, OpenAI Codex CLI, Gemini CLI, Droid CLI, Opencode CLI, Warp | +| Other Outputs | AGENTS.md-style outputs, Generic Skills, README-like outputs, `.editorconfig`, `.git/info/exclude` | -More platforms being added continuously. +More platforms are still being added. ## Architecture -- **CLI** (`@truenine/memory-sync`): core sync engine—reads config, writes target tool files, generates copy-ready prompts. -- **Core** (Rust): file I/O, directory traversal, format conversion. -- **Config DSL** (JSON): reads only the global config file `~/.aindex/.tnmsc.json`, which defines sync rules and target tools. -- **GUI** (Tauri): desktop app that calls the CLI as its backend, providing a visual interface. +- **SDK** (`@truenine/memory-sync-sdk` / `tnmsc` crate): the private mixed core for pipeline, prompt service, schema, bridge runtime, and core integration logic. +- **CLI Shell** (`@truenine/memory-sync-cli`): the public `tnmsc` command entry, compatibility export surface, and platform-distribution shell. +- **MCP** (`@truenine/memory-sync-mcp`): an stdio server that exposes prompt-asset management to MCP-capable hosts. +- **Libraries** (`logger`, `md-compiler`, `script-runtime`): Rust-first shared libraries. +- **GUI** (Tauri): the desktop workflow entry, consuming the `tnmsc` crate from `sdk`. ## FAQ **When AI tools finally have a unified standard, what use will this project be?** Then it will have completed its historical mission. -**There's already AGENTS.md, agentskills, and the MCP standard—why do I still need this junk?** +**There's already AGENTS.md, agentskills, and the MCP standard. Why do I still need this junk?** -Native-friendly, plus targeted conditional prompt authoring. +Because native targets still differ, and because conditional prompt authoring still has to land somewhere concrete. -`AGENTS.md` is the spec; `memory-sync` is the hauler—it writes the same memory into the native config formats of a dozen tools simultaneously, sparing you the manual copy-paste grind. +`AGENTS.md` is the format. `memory-sync` is the hauler plus assembler. It writes the same source of truth into multiple native configs and managed artifacts at once, so you do not have to do the copy-paste labor by hand. CLI, SDK, MCP, and GUI are just different entry points around that same model. -**Is there anything in your prompts you don't want to leave behind?** +**Is there anything in the prompt or generated output that I may not want to leave behind?** -Yes. That's why `memory-sync` provides a full-wipe mode: after sync, only the content you explicitly authorised remains in the target tools—everything else is erased. Prompts are fully computable, no hidden residue, no backdoor left for anyone else. +Yes. That is why `memory-sync` gives you cleanup and protection boundaries. After sync, only the managed outputs you explicitly allow should remain. Anything else should either be cleaned or protected on purpose. Prompts and derived artifacts should stay computable, auditable, and residue-free. ## Who is this for To use `memory-sync` you need: -- Solid development experience, years of working with various dev tools -- Proficiency with version control (Git) -- Proficiency with the terminal +- Solid development experience and long-term exposure to dev tools +- Competence with version control (Git) +- Competence with the terminal --- - You are writing code in a forgotten sewer. -No one will proactively feed you, not even a tiny free quota, not even a half-decent document. -- As a rat, you can barely get your hands on anything good: -scurrying between free tiers, trial credits, education discounts, and random third-party scripts. +No one is going to proactively feed you, not even a tiny free quota, not even a decent document. +- As a rat, you were never going to get good food anyway: +you keep scurrying between free tiers, trial credits, education discounts, and random third-party scripts. - What can you do? -Keep darting between IDEs, CLIs, browser extensions, and cloud Agents, copying and pasting the same memory a hundred times. -- You leech API offers from vendors day after day: -today one platform runs a discount so you top up a little; tomorrow another launches a promo so you rush to scrape it. -- Once they have harvested the telemetry, user profiles, and usage patterns they want, -they can kick you—this stinking rat—away at any moment: price hikes, rate limits, account bans, and you have no channel to complain. +Keep darting between IDEs, CLIs, browser extensions, and cloud agents, copying and pasting the same memory a hundred times. +- You keep scraping vendor API deals day after day: +today one platform discounts something, so you top up a little; tomorrow another launches a promotion, so you rush over there too. +- Once they have harvested the telemetry, user profile, and usage pattern they wanted, +they can kick you away at any moment: price hikes, quotas, bans, and no real channel for complaint. If you are barely surviving in this environment, `memory-sync` is built for you: -carry fewer bricks, copy prompts fewer times—at least on the "memory" front, you are no longer completely on the passive receiving end. +to help you carry a little less brick, paste the same prompt a few fewer times, and at least stop being completely passive around "memory". ## Who is NOT welcome @@ -96,17 +99,17 @@ Stable salary, project revenue share, budget to sign official APIs yearly. - And yet you still come down here, competing with us filthy sewer rats for the scraps in the slop bucket. - If you can afford APIs and enterprise plans, go pay for them. -Do things that actually create value—pay properly, give proper feedback, nudge the ecosystem slightly in the right direction. +Do things that actually create value: pay properly, give proper feedback, and nudge the ecosystem slightly in the right direction. - Instead of coming back down -to strip away the tiny gap left for marginalised developers, squeezing out the last crumbs with us rats. +to strip away the tiny gap left for marginalized developers, squeezing out the last crumbs with us rats. - You are a freeloader. -Everything must be pre-chewed and spoon-fed; you won't even touch a terminal. -- You love the grind culture. +Everything must be pre-chewed and spoon-fed; you will not even touch a terminal. +- You love grind culture. Treating "hustle" as virtue, "996" as glory, stepping on peers as a promotion strategy. - You leave no room for others. -Not about whether you share—it's about actively stomping on people, competing maliciously, sustaining your position by suppressing peers, using others' survival space as your stepping stone. +This is not about whether you share everything. It is about actively stomping on people, competing maliciously, and treating other people's survival space as your stepping stone. In other words: -**this is not a tool for optimising capital costs, but a small counterattack prepared for the "rats with no choice" in a world of extreme resource inequality.** +**this is not a tool for optimizing capital cost. It is a small counterattack for the "rats with no choice" in a world of extreme resource inequality.** ## Created by diff --git a/SECURITY.md b/SECURITY.md index d08111a9..aeac0517 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -34,22 +34,23 @@ Don't rush. ## Scope -`memory-sync` is a CLI tool that **reads source files only and writes target configs only**. Its security boundary: +`memory-sync` is now a toolkit made of CLI / SDK / MCP / GUI surfaces, not just a single CLI binary. Its security boundary: -- **Reads**: user `.src.mdx` source files, the global config file (`~/.aindex/.tnmsc.json`) -- **Writes**: target tool config directories (`.cursor/`, `.claude/`, `.kiro/`, etc.) -- **Cleans**: removes stale files from target directories during sync +- **Reads**: user `.src.mdx` source files, project config files, the global config file (`~/.aindex/.tnmsc.json`), and repository metadata needed for sync +- **Writes**: target-tool config directories, managed prompt artifacts such as `dist/`, generated skills / README-like outputs, and related helper configs +- **Cleans**: removes stale managed outputs and target-directory residue during sync or cleanup The following are **out of scope**: - Security vulnerabilities in target AI tools themselves - Compliance of user prompt content -- Supply chain security of third-party plugins (`packages/`) — all plugins are `private` and not published to npm +- Hardening of third-party dependencies, hosted platforms, or the local workstation outside this repository +- External scripts, private plugins, or unmanaged files injected by the user into the workflow ## Design Principles -- **Never modifies source files**: read-only on source; writes only to target -- **Full clean mode**: after sync, only explicitly authorised content remains in target directories — no hidden residue -- **No network requests**: CLI core makes no outbound network requests (version check excepted, and times out gracefully) -- **No telemetry**: no user data collected or reported +- **Separation between source and derived state**: source files, generated artifacts, and target-tool configs must stay clearly separated, auditable, and traceable +- **Cleanup touches managed outputs only**: cleanup should only remove generated outputs or explicitly configured targets, never silently widen its delete boundary +- **No hidden telemetry**: no user data is collected or reported +- **External network behavior must be explicit**: core sync logic must not depend on hidden outbound requests; if release or docs-deploy automation talks to npm, GitHub, or Vercel, that behavior must remain visible in workflow files ## License This project is licensed under [AGPL-3.0](LICENSE). Unauthorised commercial use in violation of the licence will be pursued legally. \ No newline at end of file diff --git a/cli/Cargo.toml b/cli/Cargo.toml index b8f34c00..f7e98554 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -16,3 +16,4 @@ path = "src/main.rs" tnmsc = { workspace = true } tnmsc-logger = { workspace = true } clap = { workspace = true } +serde_json = { workspace = true } diff --git a/cli/package.json b/cli/package.json index 2a148297..bf660702 100644 --- a/cli/package.json +++ b/cli/package.json @@ -67,6 +67,7 @@ "typecheck:run": "tsc --noEmit -p tsconfig.lib.json" }, "dependencies": { + "@truenine/script-runtime": "workspace:*", "json5": "catalog:", "yaml": "catalog:", "zod": "catalog:" diff --git a/cli/scripts/ensure-sdk-build.ts b/cli/scripts/ensure-sdk-build.ts index 84aebe5a..444d4882 100644 --- a/cli/scripts/ensure-sdk-build.ts +++ b/cli/scripts/ensure-sdk-build.ts @@ -1,38 +1,26 @@ #!/usr/bin/env tsx -import {spawnSync} from 'node:child_process' -import {existsSync} from 'node:fs' -import {dirname, resolve} from 'node:path' -import {fileURLToPath} from 'node:url' +import { spawnSync } from "node:child_process"; +import { existsSync } from "node:fs"; +import { dirname, resolve } from "node:path"; +import { fileURLToPath } from "node:url"; -const __dirname = dirname(fileURLToPath(import.meta.url)) -const cliDir = resolve(__dirname, '..') -const workspaceDir = resolve(cliDir, '..') -const sdkDistDir = resolve(cliDir, '../sdk/dist') +const __dirname = dirname(fileURLToPath(import.meta.url)); +const cliDir = resolve(__dirname, ".."); +const workspaceDir = resolve(cliDir, ".."); +const sdkDistDir = resolve(cliDir, "../sdk/dist"); -const REQUIRED_SDK_OUTPUTS = [ - 'index.mjs', - 'index.d.mts', - 'globals.mjs', - 'globals.d.mts', - 'plugin-runtime.mjs', - 'script-runtime-worker.mjs', - 'tnmsc.schema.json' -] as const +const REQUIRED_SDK_OUTPUTS = ["index.mjs", "index.d.mts", "globals.mjs", "globals.d.mts", "tnmsc.schema.json"] as const; function hasRequiredSdkOutputs(): boolean { - return REQUIRED_SDK_OUTPUTS.every(fileName => existsSync(resolve(sdkDistDir, fileName))) + return REQUIRED_SDK_OUTPUTS.every((fileName) => existsSync(resolve(sdkDistDir, fileName))); } if (!hasRequiredSdkOutputs()) { - const result = spawnSync( - 'pnpm', - ['-F', '@truenine/memory-sync-sdk', 'run', 'build'], - { - cwd: workspaceDir, - stdio: 'inherit' - } - ) + const result = spawnSync("pnpm", ["-F", "@truenine/memory-sync-sdk", "run", "build"], { + cwd: workspaceDir, + stdio: "inherit", + }); - process.exit(result.status ?? 1) + process.exit(result.status ?? 1); } diff --git a/cli/scripts/sync-sdk-dist.ts b/cli/scripts/sync-sdk-dist.ts index d8610efe..be4980a8 100644 --- a/cli/scripts/sync-sdk-dist.ts +++ b/cli/scripts/sync-sdk-dist.ts @@ -1,37 +1,32 @@ #!/usr/bin/env tsx -import {cpSync, existsSync, mkdirSync, readdirSync, rmSync} from 'node:fs' -import {dirname, join, resolve} from 'node:path' -import {fileURLToPath} from 'node:url' - -const __dirname = dirname(fileURLToPath(import.meta.url)) -const cliDir = resolve(__dirname, '..') -const sdkDistDir = resolve(cliDir, '../sdk/dist') -const cliDistDir = resolve(cliDir, 'dist') - -const EXACT_FILES = new Set([ - 'babel.cjs', - 'plugin-runtime.mjs', - 'script-runtime-worker.mjs', - 'tnmsc.schema.json' -]) +import { cpSync, existsSync, mkdirSync, readdirSync, rmSync } from "node:fs"; +import { dirname, join, resolve } from "node:path"; +import { fileURLToPath } from "node:url"; + +const __dirname = dirname(fileURLToPath(import.meta.url)); +const cliDir = resolve(__dirname, ".."); +const sdkDistDir = resolve(cliDir, "../sdk/dist"); +const cliDistDir = resolve(cliDir, "dist"); + +const EXACT_FILES = new Set(["tnmsc.schema.json"]); function shouldCopy(fileName: string): boolean { - return EXACT_FILES.has(fileName) || /^jiti-.*\.mjs$/u.test(fileName) + return EXACT_FILES.has(fileName) || /^jiti-.*\.mjs$/u.test(fileName); } if (!existsSync(sdkDistDir)) { - throw new Error(`sdk dist directory is missing: ${sdkDistDir}`) + throw new Error(`sdk dist directory is missing: ${sdkDistDir}`); } -mkdirSync(cliDistDir, {recursive: true}) +mkdirSync(cliDistDir, { recursive: true }); for (const fileName of readdirSync(cliDistDir)) { - if (!shouldCopy(fileName)) continue - rmSync(join(cliDistDir, fileName), {force: true, recursive: true}) + if (!shouldCopy(fileName)) continue; + rmSync(join(cliDistDir, fileName), { force: true, recursive: true }); } for (const fileName of readdirSync(sdkDistDir)) { - if (!shouldCopy(fileName)) continue - cpSync(join(sdkDistDir, fileName), join(cliDistDir, fileName), {recursive: true}) + if (!shouldCopy(fileName)) continue; + cpSync(join(sdkDistDir, fileName), join(cliDistDir, fileName), { recursive: true }); } diff --git a/cli/src/PluginPipeline.ts b/cli/src/PluginPipeline.ts new file mode 100644 index 00000000..b5e15090 --- /dev/null +++ b/cli/src/PluginPipeline.ts @@ -0,0 +1,84 @@ +import type { + ILogger, + OutputCleanContext, + OutputCollectedContext, + OutputPlugin, + OutputRuntimeTargets, + OutputWriteContext, + PipelineConfig, + PluginOptions +} from '@truenine/memory-sync-sdk' +import type {Command, CommandContext, CommandResult} from '@/commands/Command' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {createLogger, discoverOutputRuntimeTargets, setGlobalLogLevel} from '@truenine/memory-sync-sdk' +import {JsonOutputCommand} from '@/commands/JsonOutputCommand' +import {extractUserArgs, parseArgs, resolveCommand} from '@/pipeline/CliArgumentParser' + +export class PluginPipeline { + private readonly logger: ILogger + readonly args: ParsedCliArgs + private outputPlugins: OutputPlugin[] = [] + private runtimeTargets?: OutputRuntimeTargets + + constructor(...cmdArgs: (string | undefined)[]) { + const filtered = cmdArgs.filter((arg): arg is string => arg != null) + this.args = parseArgs(extractUserArgs(filtered)) + if (this.args.logLevel != null) setGlobalLogLevel(this.args.logLevel) + this.logger = createLogger('PluginPipeline', this.args.logLevel) + this.logger.debug('initialized', {args: this.args}) + } + + registerOutputPlugins(plugins: OutputPlugin[]): this { + this.outputPlugins.push(...plugins) + return this + } + + async run(config: PipelineConfig): Promise { + const {context, outputPlugins, userConfigOptions} = config + this.registerOutputPlugins([...outputPlugins]) + let command: Command = resolveCommand(this.args) + + if (!this.args.jsonFlag) return command.execute(this.createCommandContext(context, userConfigOptions)) + + setGlobalLogLevel('silent') + if (!new Set(['config-show', 'plugins']).has(command.name)) command = new JsonOutputCommand(command) + return command.execute(this.createCommandContext(context, userConfigOptions)) + } + + private createCommandContext(ctx: OutputCollectedContext, userConfigOptions: Required): CommandContext { + return { + logger: this.logger, + outputPlugins: this.outputPlugins, + collectedOutputContext: ctx, + userConfigOptions, + createCleanContext: dryRun => this.createCleanContext(ctx, userConfigOptions, dryRun), + createWriteContext: dryRun => this.createWriteContext(ctx, userConfigOptions, dryRun) + } + } + + private createCleanContext(ctx: OutputCollectedContext, userConfigOptions: Required, dryRun: boolean): OutputCleanContext { + return { + logger: this.logger, + collectedOutputContext: ctx, + pluginOptions: userConfigOptions, + runtimeTargets: this.getRuntimeTargets(), + dryRun + } + } + + private createWriteContext(ctx: OutputCollectedContext, userConfigOptions: Required, dryRun: boolean): OutputWriteContext { + return { + logger: this.logger, + collectedOutputContext: ctx, + pluginOptions: userConfigOptions, + runtimeTargets: this.getRuntimeTargets(), + dryRun, + registeredPluginNames: this.outputPlugins.map(plugin => plugin.name) + } + } + + private getRuntimeTargets(): OutputRuntimeTargets { + this.runtimeTargets ??= discoverOutputRuntimeTargets(this.logger) + return this.runtimeTargets + } +} diff --git a/sdk/src/cli-runtime.test.ts b/cli/src/cli-runtime.test.ts similarity index 86% rename from sdk/src/cli-runtime.test.ts rename to cli/src/cli-runtime.test.ts index ab877f20..f7d2f320 100644 --- a/sdk/src/cli-runtime.test.ts +++ b/cli/src/cli-runtime.test.ts @@ -1,10 +1,6 @@ import {afterEach, describe, expect, it, vi} from 'vitest' -const { - createDefaultPluginConfigMock, - pipelineRunMock, - pluginPipelineCtorMock -} = vi.hoisted(() => ({ +const {createDefaultPluginConfigMock, pipelineRunMock, pluginPipelineCtorMock} = vi.hoisted(() => ({ createDefaultPluginConfigMock: vi.fn(), pipelineRunMock: vi.fn(), pluginPipelineCtorMock: vi.fn() @@ -17,9 +13,7 @@ vi.mock('./plugin.config', () => ({ vi.mock('./PluginPipeline', () => ({ PluginPipeline: function MockPluginPipeline(...args: unknown[]) { pluginPipelineCtorMock(...args) - return { - run: pipelineRunMock - } + return {run: pipelineRunMock} } })) @@ -31,9 +25,7 @@ afterEach(() => { describe('cli runtime lightweight commands', () => { it('does not load plugin config for --version', async () => { const {runCli} = await import('./cli-runtime') - const exitCode = await runCli(['node', 'tnmsc', '--version']) - expect(exitCode).toBe(0) expect(createDefaultPluginConfigMock).not.toHaveBeenCalled() expect(pluginPipelineCtorMock).not.toHaveBeenCalled() @@ -43,24 +35,16 @@ describe('cli runtime lightweight commands', () => { it('emits JSON for --version --json without loading plugin config', async () => { const {runCli} = await import('./cli-runtime') const writeSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) - try { const exitCode = await runCli(['node', 'tnmsc', '--version', '--json']) - expect(exitCode).toBe(0) expect(createDefaultPluginConfigMock).not.toHaveBeenCalled() expect(pluginPipelineCtorMock).not.toHaveBeenCalled() expect(pipelineRunMock).not.toHaveBeenCalled() - - const payload = JSON.parse(String(writeSpy.mock.calls[0]?.[0])) as { - readonly success: boolean - readonly message?: string - } - + const payload = JSON.parse(String(writeSpy.mock.calls[0]?.[0])) as {readonly success: boolean, readonly message?: string} expect(payload.success).toBe(true) expect(payload.message).toBe('Version displayed') - } - finally { + } finally { writeSpy.mockRestore() } }) diff --git a/sdk/src/cli-runtime.ts b/cli/src/cli-runtime.ts similarity index 67% rename from sdk/src/cli-runtime.ts rename to cli/src/cli-runtime.ts index 213b8bdf..02f8991b 100644 --- a/sdk/src/cli-runtime.ts +++ b/cli/src/cli-runtime.ts @@ -1,30 +1,41 @@ import type {Command, CommandContext, CommandResult} from '@/commands/Command' import * as path from 'node:path' import process from 'node:process' +import { + buildUnhandledExceptionDiagnostic, + createLogger, + drainBufferedDiagnostics, + FilePathKind, + mergeConfig, + setGlobalLogLevel +} from '@truenine/memory-sync-sdk' import {JsonOutputCommand, toJsonCommandResult} from '@/commands/JsonOutputCommand' -import {buildUnhandledExceptionDiagnostic} from '@/diagnostics' +import {extractUserArgs, parseArgs, resolveCommand} from '@/pipeline/CliArgumentParser' import {PluginPipeline} from '@/PluginPipeline' -import {mergeConfig} from './config' -import {extractUserArgs, parseArgs, resolveCommand} from './pipeline/CliArgumentParser' import {createDefaultPluginConfig} from './plugin.config' -import {createLogger, drainBufferedDiagnostics, FilePathKind, setGlobalLogLevel} from './plugins/plugin-core' const LIGHTWEIGHT_COMMAND_NAMES = new Set(['help', 'version', 'unknown']) export function isJsonMode(argv: readonly string[]): boolean { - return argv.some(arg => arg === '--json' || arg === '-j' || /^-[^-]*j/.test(arg)) + return argv.some(arg => arg === '--json' || arg === '-j' || /^-[^-]*j/u.test(arg)) } function writeJsonFailure(error: unknown): void { - const errorMessage = error instanceof Error ? error.message : String(error) const logger = createLogger('main', 'silent') logger.error(buildUnhandledExceptionDiagnostic('main', error)) - process.stdout.write(`${JSON.stringify(toJsonCommandResult({ - success: false, - filesAffected: 0, - dirsAffected: 0, - message: errorMessage - }, drainBufferedDiagnostics()))}\n`) + process.stdout.write( + `${JSON.stringify( + toJsonCommandResult( + { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: error instanceof Error ? error.message : String(error) + }, + drainBufferedDiagnostics() + ) + )}\n` + ) } function createUnavailableContext(kind: 'cleanup' | 'write'): never { @@ -33,11 +44,7 @@ function createUnavailableContext(kind: 'cleanup' | 'write'): never { function createLightweightCommandContext(logLevel: ReturnType['logLevel']): CommandContext { const workspaceDir = process.cwd() - const userConfigOptions = mergeConfig({ - workspaceDir, - ...logLevel != null ? {logLevel} : {} - }) - + const userConfigOptions = mergeConfig({workspaceDir, ...logLevel != null ? {logLevel} : {}}) return { logger: createLogger('PluginPipeline', logLevel), outputPlugins: [], @@ -57,27 +64,16 @@ function createLightweightCommandContext(logLevel: ReturnType[ } } -function resolveLightweightCommand(argv: readonly string[]): { - readonly command: Command - readonly context: CommandContext -} | undefined { - const filteredArgs = argv.filter((arg): arg is string => arg != null) - const parsedArgs = parseArgs(extractUserArgs(filteredArgs)) +function resolveLightweightCommand(argv: readonly string[]): {readonly command: Command, readonly context: CommandContext} | undefined { + const parsedArgs = parseArgs(extractUserArgs(argv.filter((arg): arg is string => arg != null))) let command: Command = resolveCommand(parsedArgs) - if (!LIGHTWEIGHT_COMMAND_NAMES.has(command.name)) return void 0 - if (parsedArgs.logLevel != null) setGlobalLogLevel(parsedArgs.logLevel) + if (!parsedArgs.jsonFlag) return {command, context: createLightweightCommandContext(parsedArgs.logLevel)} - if (parsedArgs.jsonFlag) { - setGlobalLogLevel('silent') - command = new JsonOutputCommand(command) - } - - return { - command, - context: createLightweightCommandContext(parsedArgs.logLevel) - } + setGlobalLogLevel('silent') + command = new JsonOutputCommand(command) + return {command, context: createLightweightCommandContext(parsedArgs.logLevel)} } export async function runCli(argv: readonly string[] = process.argv): Promise { @@ -92,13 +88,11 @@ export async function runCli(argv: readonly string[] = process.argv): Promise ResolvedCommand { Some(CliCommand::Plugins) => ResolvedCommand::Plugins, } } + +#[cfg(test)] +mod tests { + use super::*; + use clap::Parser; + + #[test] + fn resolve_command_defaults_to_execute() { + let cli = Cli::parse_from(["tnmsc"]); + assert_eq!(resolve_command(&cli), ResolvedCommand::Execute); + } + + #[test] + fn resolve_command_parses_clean_dry_run() { + let cli = Cli::parse_from(["tnmsc", "clean", "--dry-run"]); + assert_eq!(resolve_command(&cli), ResolvedCommand::DryRunClean); + } + + #[test] + fn config_key_value_parsing_combines_flag_and_positional_pairs() { + let cli = Cli::parse_from([ + "tnmsc", + "config", + "--set", + "workspaceDir=/tmp/workspace", + "logLevel=debug", + ]); + + assert_eq!( + resolve_command(&cli), + ResolvedCommand::Config(vec![ + ("workspaceDir".to_string(), "/tmp/workspace".to_string()), + ("logLevel".to_string(), "debug".to_string()), + ]) + ); + } +} diff --git a/cli/src/commands/CleanCommand.ts b/cli/src/commands/CleanCommand.ts new file mode 100644 index 00000000..ec99c8bd --- /dev/null +++ b/cli/src/commands/CleanCommand.ts @@ -0,0 +1,17 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import {performCleanup} from '@truenine/memory-sync-sdk' + +export class CleanCommand implements Command { + readonly name = 'clean' + + async execute(ctx: CommandContext): Promise { + const {logger, outputPlugins, createCleanContext} = ctx + logger.info('running clean pipeline', {command: 'clean'}) + const result = await performCleanup(outputPlugins, createCleanContext(false), logger) + if (result.violations.length > 0 || result.conflicts.length > 0) { + return {success: false, filesAffected: 0, dirsAffected: 0, ...result.message != null ? {message: result.message} : {}} + } + logger.info('clean complete', {deletedFiles: result.deletedFiles, deletedDirs: result.deletedDirs}) + return {success: true, filesAffected: result.deletedFiles, dirsAffected: result.deletedDirs} + } +} diff --git a/sdk/src/commands/Command.ts b/cli/src/commands/Command.ts similarity index 69% rename from sdk/src/commands/Command.ts rename to cli/src/commands/Command.ts index 7f83bc06..789aadb9 100644 --- a/sdk/src/commands/Command.ts +++ b/cli/src/commands/Command.ts @@ -1,16 +1,14 @@ -import type {ILogger, LoggerDiagnosticRecord} from '@truenine/logger' import type { + ILogger, + LoggerDiagnosticRecord, OutputCleanContext, OutputCollectedContext, OutputPlugin, OutputWriteContext, PluginOptions, UserConfigFile -} from '../plugins/plugin-core' +} from '@truenine/memory-sync-sdk' -/** - * Command execution context - */ export interface CommandContext { readonly logger: ILogger readonly outputPlugins: readonly OutputPlugin[] @@ -20,9 +18,6 @@ export interface CommandContext { readonly createWriteContext: (dryRun: boolean) => OutputWriteContext } -/** - * Command execution result - */ export interface CommandResult { readonly success: boolean readonly filesAffected: number @@ -30,10 +25,6 @@ export interface CommandResult { readonly message?: string } -/** - * Per-plugin execution result for JSON output mode. - * Captures individual plugin execution status, timing, and error details. - */ export interface PluginExecutionResult { readonly pluginName: string readonly kind: 'Input' | 'Output' @@ -43,11 +34,6 @@ export interface PluginExecutionResult { readonly duration?: number } -/** - * Structured JSON output for command execution results. - * Extends CommandResult with per-plugin details and error aggregation - * for consumption by Tauri sidecar / external tooling. - */ export interface JsonCommandResult { readonly success: boolean readonly filesAffected: number @@ -58,27 +44,17 @@ export interface JsonCommandResult { readonly errors: readonly LoggerDiagnosticRecord[] } -/** - * JSON output for configuration information. - * Contains the merged config and the source layers that contributed to it. - */ export interface JsonConfigInfo { readonly merged: UserConfigFile readonly sources: readonly ConfigSource[] } -/** - * Describes a single configuration source layer. - */ export interface ConfigSource { readonly path: string readonly layer: 'programmatic' | 'global' | 'default' readonly config: Partial } -/** - * JSON output for plugin information listing. - */ export interface JsonPluginInfo { readonly name: string readonly kind: 'Input' | 'Output' @@ -86,9 +62,6 @@ export interface JsonPluginInfo { readonly dependencies: readonly string[] } -/** - * Base command interface - */ export interface Command { readonly name: string execute: (ctx: CommandContext) => Promise diff --git a/cli/src/commands/CommandFactory.ts b/cli/src/commands/CommandFactory.ts new file mode 100644 index 00000000..27acf7d8 --- /dev/null +++ b/cli/src/commands/CommandFactory.ts @@ -0,0 +1,17 @@ +import type {Command} from './Command' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' + +export interface CommandFactory { + canHandle: (args: ParsedCliArgs) => boolean + createCommand: (args: ParsedCliArgs) => Command +} + +export enum FactoryPriority { + Flags = 0, + Unknown = 1, + Subcommand = 2 +} + +export interface PrioritizedCommandFactory extends CommandFactory { + readonly priority: FactoryPriority +} diff --git a/sdk/src/commands/CommandRegistry.ts b/cli/src/commands/CommandRegistry.ts similarity index 62% rename from sdk/src/commands/CommandRegistry.ts rename to cli/src/commands/CommandRegistry.ts index 91d16351..736055bb 100644 --- a/sdk/src/commands/CommandRegistry.ts +++ b/cli/src/commands/CommandRegistry.ts @@ -3,20 +3,16 @@ import type {CommandFactory, PrioritizedCommandFactory} from './CommandFactory' import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {FactoryPriority} from './CommandFactory' -/** - * Command registry that manages command factories - * Uses priority-based resolution for factory selection - */ export class CommandRegistry { private readonly factories: PrioritizedCommandFactory[] = [] register(factory: PrioritizedCommandFactory): void { this.factories.push(factory) - this.factories.sort((a, b) => a.priority - b.priority) // Sort by priority (lower number = higher priority) + this.factories.sort((a, b) => a.priority - b.priority) } registerWithPriority(factory: CommandFactory, priority: FactoryPriority): void { - const prioritized: PrioritizedCommandFactory = { // Create a wrapper that delegates to the original factory while adding priority + const prioritized: PrioritizedCommandFactory = { priority, canHandle: (args: ParsedCliArgs) => factory.canHandle(args), createCommand: (args: ParsedCliArgs) => factory.createCommand(args) @@ -26,18 +22,18 @@ export class CommandRegistry { } resolve(args: ParsedCliArgs): Command { - for (const factory of this.factories) { // First pass: check prioritized factories (flags, unknown commands) + for (const factory of this.factories) { if (factory.priority <= FactoryPriority.Unknown && factory.canHandle(args)) return factory.createCommand(args) } - for (const factory of this.factories) { // Second pass: check subcommand factories + for (const factory of this.factories) { if (factory.priority === FactoryPriority.Subcommand && factory.canHandle(args)) return factory.createCommand(args) } - for (const factory of this.factories) { // Third pass: use catch-all factory (ExecuteCommandFactory) + for (const factory of this.factories) { if (factory.canHandle(args)) return factory.createCommand(args) } - throw new Error('No command factory found for the given arguments') // This should never happen if ExecuteCommandFactory is registered + throw new Error('No command factory found for the given arguments') } } diff --git a/cli/src/commands/ConfigCommand.ts b/cli/src/commands/ConfigCommand.ts new file mode 100644 index 00000000..90defede --- /dev/null +++ b/cli/src/commands/ConfigCommand.ts @@ -0,0 +1,150 @@ +import type {AindexConfigKeyPath} from '@truenine/memory-sync-sdk' +import type {Command, CommandContext, CommandResult} from './Command' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {AINDEX_CONFIG_KEY_PATHS, buildUsageDiagnostic, diagnosticLines, getRequiredGlobalConfigPath} from '@truenine/memory-sync-sdk' + +type ValidConfigKey = 'workspaceDir' | 'logLevel' | AindexConfigKeyPath +const VALID_CONFIG_KEYS: readonly ValidConfigKey[] = ['workspaceDir', ...AINDEX_CONFIG_KEY_PATHS, 'logLevel'] + +function isValidConfigKey(key: string): key is ValidConfigKey { + return VALID_CONFIG_KEYS.includes(key as ValidConfigKey) +} + +function isValidLogLevel(value: string): boolean { + return ['trace', 'debug', 'info', 'warn', 'error'].includes(value) +} + +type ConfigValue = string | ConfigObject +interface ConfigObject { + [key: string]: ConfigValue | undefined +} + +function readGlobalConfig(): ConfigObject { + const configPath = getRequiredGlobalConfigPath() + if (!fs.existsSync(configPath)) return {} + try { + return JSON.parse(fs.readFileSync(configPath, 'utf8')) as ConfigObject + } catch { + return {} + } +} + +function writeGlobalConfig(config: ConfigObject): void { + const configPath = getRequiredGlobalConfigPath() + const configDir = path.dirname(configPath) + if (!fs.existsSync(configDir)) fs.mkdirSync(configDir, {recursive: true}) + fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, 'utf8') +} + +function setNestedValue(obj: ConfigObject, key: string, value: string): void { + const parts = key.split('.') + let current: ConfigObject = obj + for (let i = 0; i < parts.length - 1; i++) { + const part = parts[i] + if (part == null) continue + const next = current[part] + if (typeof next !== 'object' || next === null || Array.isArray(next)) current[part] = {} + current = current[part] as ConfigObject + } + const lastPart = parts.at(-1) + if (lastPart != null) current[lastPart] = value +} + +function getNestedValue(obj: ConfigObject, key: string): ConfigValue | undefined { + const parts = key.split('.') + let current: ConfigValue | undefined = obj + for (const part of parts) { + if (typeof current !== 'object' || current === null || Array.isArray(current)) return void 0 + current = current[part] + } + return current +} + +export class ConfigCommand implements Command { + readonly name = 'config' + + constructor(private readonly options: readonly [key: string, value: string][]) {} + + async execute(ctx: CommandContext): Promise { + const {logger} = ctx + + if (this.options.length === 0) { + logger.error( + buildUsageDiagnostic({ + code: 'CONFIG_COMMAND_ARGUMENTS_MISSING', + title: 'Config command requires at least one key=value pair', + rootCause: diagnosticLines('tnmsc config was invoked without any configuration assignments.'), + exactFix: diagnosticLines('Run `tnmsc config key=value` with at least one supported configuration key.'), + possibleFixes: [diagnosticLines(`Use one of the supported keys: ${VALID_CONFIG_KEYS.join(', ')}`)], + details: {validKeys: [...VALID_CONFIG_KEYS]} + }) + ) + logger.info('Usage: tnmsc config key=value') + logger.info(`Valid keys: ${VALID_CONFIG_KEYS.join(', ')}`) + return {success: false, filesAffected: 0, dirsAffected: 0, message: 'No options provided'} + } + + let config: ConfigObject + try { + config = readGlobalConfig() + } catch (error) { + return {success: false, filesAffected: 0, dirsAffected: 0, message: error instanceof Error ? error.message : String(error)} + } + + const errors: string[] = [] + const updated: string[] = [] + for (const [key, value] of this.options) { + if (!isValidConfigKey(key)) { + errors.push(`Invalid key: ${key}`) + logger.error( + buildUsageDiagnostic({ + code: 'CONFIG_COMMAND_KEY_INVALID', + title: `Unsupported config key: ${key}`, + rootCause: diagnosticLines(`The config command received "${key}", which is not a supported configuration key.`), + exactFix: diagnosticLines('Use one of the supported config keys and rerun the command.'), + possibleFixes: [diagnosticLines(`Supported keys: ${VALID_CONFIG_KEYS.join(', ')}`)], + details: {key, validKeys: [...VALID_CONFIG_KEYS]} + }) + ) + continue + } + + if (key === 'logLevel' && !isValidLogLevel(value)) { + errors.push(`Invalid logLevel value: ${value}`) + logger.error( + buildUsageDiagnostic({ + code: 'CONFIG_COMMAND_LOG_LEVEL_INVALID', + title: `Unsupported logLevel value: ${value}`, + rootCause: diagnosticLines(`The config command received "${value}" for logLevel, but tnmsc does not support that level.`), + exactFix: diagnosticLines('Set logLevel to one of: trace, debug, info, warn, or error.'), + details: {key, value, validLevels: ['trace', 'debug', 'info', 'warn', 'error']} + }) + ) + continue + } + + const oldValue = getNestedValue(config, key) + setNestedValue(config, key, value) + if (oldValue !== value) updated.push(`${key}=${value}`) + logger.info('configuration updated', {key, value}) + } + + if (updated.length > 0) { + try { + writeGlobalConfig(config) + } catch (error) { + return {success: false, filesAffected: 0, dirsAffected: 0, message: error instanceof Error ? error.message : String(error)} + } + logger.info('global config written', {path: getRequiredGlobalConfigPath()}) + } + + const success = errors.length === 0 + return { + success, + filesAffected: updated.length > 0 ? 1 : 0, + dirsAffected: 0, + message: success ? `Configuration updated: ${updated.join(', ')}` : `Partial update: ${updated.join(', ')}. Errors: ${errors.join(', ')}` + } + } +} diff --git a/cli/src/commands/ConfigShowCommand.ts b/cli/src/commands/ConfigShowCommand.ts new file mode 100644 index 00000000..f07072d6 --- /dev/null +++ b/cli/src/commands/ConfigShowCommand.ts @@ -0,0 +1,21 @@ +import type {Command, CommandContext, CommandResult, ConfigSource, JsonConfigInfo} from './Command' +import process from 'node:process' +import {ConfigLoader} from '@truenine/memory-sync-sdk' + +export class ConfigShowCommand implements Command { + readonly name = 'config-show' + + async execute(ctx: CommandContext): Promise { + const {logger} = ctx + const loader = new ConfigLoader() + const mergedResult = loader.load() + const sources: ConfigSource[] = mergedResult.sources.map(sourcePath => { + const loaded = loader.loadFromFile(sourcePath) + return {path: sourcePath, layer: 'global', config: loaded.config} + }) + const configInfo: JsonConfigInfo = {merged: mergedResult.config, sources} + process.stdout.write(`${JSON.stringify(configInfo)}\n`) + logger.info('config shown', {sources: mergedResult.sources.length}) + return {success: true, filesAffected: 0, dirsAffected: 0, message: `Configuration displayed (${sources.length} source(s))`} + } +} diff --git a/sdk/src/commands/DryRunCleanCommand.ts b/cli/src/commands/DryRunCleanCommand.ts similarity index 62% rename from sdk/src/commands/DryRunCleanCommand.ts rename to cli/src/commands/DryRunCleanCommand.ts index 72ce58c5..12b20913 100644 --- a/sdk/src/commands/DryRunCleanCommand.ts +++ b/cli/src/commands/DryRunCleanCommand.ts @@ -1,22 +1,15 @@ import type {Command, CommandContext, CommandResult} from './Command' import * as path from 'node:path' -import {collectAllPluginOutputs} from '../plugins/plugin-core' -import {logProtectedDeletionGuardError} from '../ProtectedDeletionGuard' -import {collectDeletionTargets} from './CleanupUtils' +import {collectAllPluginOutputs, collectDeletionTargets, logProtectedDeletionGuardError} from '@truenine/memory-sync-sdk' -/** - * Dry-run clean command - simulates clean operations without actual deletion - */ export class DryRunCleanCommand implements Command { readonly name = 'dry-run-clean' async execute(ctx: CommandContext): Promise { const {logger, outputPlugins, createCleanContext} = ctx logger.info('running clean pipeline', {command: 'dry-run-clean', dryRun: true}) - const cleanCtx = createCleanContext(true) const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx) - logger.info('collected outputs for cleanup', { dryRun: true, projectDirs: outputs.projectDirs.length, @@ -38,8 +31,9 @@ export class DryRunCleanCommand implements Command { } } - this.logDryRunFiles(filesToDelete, logger) - this.logDryRunDirectories(totalDirsToDelete, logger) + for (const file of filesToDelete) logger.info('would delete file', {path: path.isAbsolute(file) ? file : path.resolve(file), dryRun: true}) + for (const dir of [...totalDirsToDelete].sort((a, b) => b.length - a.length)) + { logger.info('would delete directory', {path: path.isAbsolute(dir) ? dir : path.resolve(dir), dryRun: true}) } logger.info('clean complete', { dryRun: true, @@ -56,19 +50,4 @@ export class DryRunCleanCommand implements Command { message: 'Dry-run complete, no files were deleted' } } - - private logDryRunFiles(files: string[], logger: CommandContext['logger']): void { - for (const file of files) { - const resolved = path.isAbsolute(file) ? file : path.resolve(file) - logger.info('would delete file', {path: resolved, dryRun: true}) - } - } - - private logDryRunDirectories(dirs: string[], logger: CommandContext['logger']): void { - const sortedDirs = [...dirs].sort((a, b) => b.length - a.length) - for (const dir of sortedDirs) { - const resolved = path.isAbsolute(dir) ? dir : path.resolve(dir) - logger.info('would delete directory', {path: resolved, dryRun: true}) - } - } } diff --git a/sdk/src/commands/DryRunOutputCommand.ts b/cli/src/commands/DryRunOutputCommand.ts similarity index 67% rename from sdk/src/commands/DryRunOutputCommand.ts rename to cli/src/commands/DryRunOutputCommand.ts index 180501f6..fbf92733 100644 --- a/sdk/src/commands/DryRunOutputCommand.ts +++ b/cli/src/commands/DryRunOutputCommand.ts @@ -1,20 +1,12 @@ import type {Command, CommandContext, CommandResult} from './Command' -import {syncWindowsConfigIntoWsl} from '@/wsl-mirror-sync' -import { - collectOutputDeclarations, - executeDeclarativeWriteOutputs -} from '../plugins/plugin-core' +import {collectOutputDeclarations, executeDeclarativeWriteOutputs, syncWindowsConfigIntoWsl} from '@truenine/memory-sync-sdk' -/** - * Dry-run output command - simulates write operations without actual I/O - */ export class DryRunOutputCommand implements Command { readonly name = 'dry-run-output' async execute(ctx: CommandContext): Promise { const {logger, outputPlugins, createWriteContext} = ctx logger.info('started', {command: 'dry-run-output', dryRun: true}) - const writeCtx = createWriteContext(true) const predeclaredOutputs = await collectOutputDeclarations(outputPlugins, writeCtx) const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx, predeclaredOutputs) @@ -29,23 +21,11 @@ export class DryRunOutputCommand implements Command { const wslMirrorResult = await syncWindowsConfigIntoWsl(outputPlugins, writeCtx, void 0, predeclaredOutputs) if (wslMirrorResult.errors.length > 0) { - return { - success: false, - filesAffected: totalFiles, - dirsAffected: totalDirs, - message: wslMirrorResult.errors.join('\n') - } + return {success: false, filesAffected: totalFiles, dirsAffected: totalDirs, message: wslMirrorResult.errors.join('\n')} } totalFiles += wslMirrorResult.mirroredFiles - logger.info('complete', {command: 'dry-run-output', totalFiles, totalDirs, dryRun: true}) - - return { - success: true, - filesAffected: totalFiles, - dirsAffected: totalDirs, - message: 'Dry-run complete, no files were written' - } + return {success: true, filesAffected: totalFiles, dirsAffected: totalDirs, message: 'Dry-run complete, no files were written'} } } diff --git a/sdk/src/commands/ExecuteCommand.ts b/cli/src/commands/ExecuteCommand.ts similarity index 55% rename from sdk/src/commands/ExecuteCommand.ts rename to cli/src/commands/ExecuteCommand.ts index 8f4c1c96..2a100c6f 100644 --- a/sdk/src/commands/ExecuteCommand.ts +++ b/cli/src/commands/ExecuteCommand.ts @@ -1,15 +1,6 @@ import type {Command, CommandContext, CommandResult} from './Command' -import {syncWindowsConfigIntoWsl} from '@/wsl-mirror-sync' -import { - collectOutputDeclarations, - executeDeclarativeWriteOutputs -} from '../plugins/plugin-core' -import {performCleanup} from './CleanupUtils' +import {collectOutputDeclarations, executeDeclarativeWriteOutputs, performCleanup, syncWindowsConfigIntoWsl} from '@truenine/memory-sync-sdk' -/** - * Execute command - performs actual write operations - * Includes pre-cleanup to remove stale files before writing new outputs - */ export class ExecuteCommand implements Command { readonly name = 'execute' @@ -19,21 +10,13 @@ export class ExecuteCommand implements Command { const writeCtx = createWriteContext(false) const predeclaredOutputs = await collectOutputDeclarations(outputPlugins, writeCtx) - const cleanCtx = createCleanContext(false) // Step 1: Pre-cleanup (non-dry-run only) - const cleanupResult = await performCleanup(outputPlugins, cleanCtx, logger, predeclaredOutputs) - + const cleanupResult = await performCleanup(outputPlugins, createCleanContext(false), logger, predeclaredOutputs) if (cleanupResult.violations.length > 0 || cleanupResult.conflicts.length > 0) { - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - ...cleanupResult.message != null ? {message: cleanupResult.message} : {} - } + return {success: false, filesAffected: 0, dirsAffected: 0, ...cleanupResult.message != null ? {message: cleanupResult.message} : {}} } logger.info('cleanup complete', {deletedFiles: cleanupResult.deletedFiles, deletedDirs: cleanupResult.deletedDirs}) - - const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx, predeclaredOutputs) // Step 2: Write outputs + const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx, predeclaredOutputs) let totalFiles = 0 let totalDirs = 0 @@ -47,33 +30,16 @@ export class ExecuteCommand implements Command { } if (writeErrors.length > 0) { - return { - success: false, - filesAffected: totalFiles, - dirsAffected: totalDirs, - message: writeErrors.join('\n') - } + return {success: false, filesAffected: totalFiles, dirsAffected: totalDirs, message: writeErrors.join('\n')} } const wslMirrorResult = await syncWindowsConfigIntoWsl(outputPlugins, writeCtx, void 0, predeclaredOutputs) - if (wslMirrorResult.errors.length > 0) { - return { - success: false, - filesAffected: totalFiles, - dirsAffected: totalDirs, - message: wslMirrorResult.errors.join('\n') - } + return {success: false, filesAffected: totalFiles, dirsAffected: totalDirs, message: wslMirrorResult.errors.join('\n')} } totalFiles += wslMirrorResult.mirroredFiles - logger.info('complete', {command: 'execute', pluginCount: results.size}) - - return { - success: true, - filesAffected: totalFiles, - dirsAffected: totalDirs - } + return {success: true, filesAffected: totalFiles, dirsAffected: totalDirs} } } diff --git a/sdk/src/commands/HelpCommand.ts b/cli/src/commands/HelpCommand.ts similarity index 91% rename from sdk/src/commands/HelpCommand.ts rename to cli/src/commands/HelpCommand.ts index ae7201d1..1ca4f8f9 100644 --- a/sdk/src/commands/HelpCommand.ts +++ b/cli/src/commands/HelpCommand.ts @@ -1,5 +1,5 @@ import type {Command, CommandContext, CommandResult} from './Command' -import {AINDEX_CONFIG_KEY_PATHS} from '@/plugins/plugin-core' +import {AINDEX_CONFIG_KEY_PATHS} from '@truenine/memory-sync-sdk' import {getCliVersion} from './VersionCommand' const CLI_NAME = 'tnmsc' @@ -58,20 +58,11 @@ CONFIGURATION: See documentation for detailed configuration options. `.trim() -/** - * Help command - displays CLI usage information - */ export class HelpCommand implements Command { readonly name = 'help' async execute(ctx: CommandContext): Promise { ctx.logger.info(HELP_TEXT) - - return { - success: true, - filesAffected: 0, - dirsAffected: 0, - message: 'Help displayed' - } + return {success: true, filesAffected: 0, dirsAffected: 0, message: 'Help displayed'} } } diff --git a/cli/src/commands/InitCommand.test.ts b/cli/src/commands/InitCommand.test.ts new file mode 100644 index 00000000..0b265c43 --- /dev/null +++ b/cli/src/commands/InitCommand.test.ts @@ -0,0 +1,76 @@ +import type {CommandContext} from './Command' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {createLogger, FilePathKind, mergeConfig} from '@truenine/memory-sync-sdk' +import {describe, expect, it} from 'vitest' +import {InitCommand} from './InitCommand' + +function createCommandContext(): CommandContext { + const workspaceDir = path.resolve('tmp-init-command') + const userConfigOptions = mergeConfig({workspaceDir}) + + return { + logger: createLogger('InitCommandTest', 'error'), + outputPlugins: [], + userConfigOptions, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir) + }, + projects: [] + } + }, + createCleanContext: dryRun => + ({ + logger: createLogger('InitCommandTest', 'error'), + fs, + path, + glob: {} as never, + runtimeTargets: {jetbrainsCodexDirs: []}, + dryRun, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir) + }, + projects: [] + } + } + }) as unknown as CommandContext['createCleanContext'] extends (dryRun: boolean) => infer T ? T : never, + createWriteContext: dryRun => + ({ + logger: createLogger('InitCommandTest', 'error'), + fs, + path, + glob: {} as never, + runtimeTargets: {jetbrainsCodexDirs: []}, + dryRun, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir) + }, + projects: [] + } + } + }) as unknown as CommandContext['createWriteContext'] extends (dryRun: boolean) => infer T ? T : never + } +} + +describe('init command', () => { + it('returns a deprecation failure without creating files', async () => { + const result = await new InitCommand().execute(createCommandContext()) + expect(result.success).toBe(false) + expect(result.filesAffected).toBe(0) + expect(result.dirsAffected).toBe(0) + expect(result.message).toContain('deprecated') + expect(result.message).toContain('~/workspace/aindex/public/') + }) +}) diff --git a/cli/src/commands/InitCommand.ts b/cli/src/commands/InitCommand.ts new file mode 100644 index 00000000..54ef706f --- /dev/null +++ b/cli/src/commands/InitCommand.ts @@ -0,0 +1,24 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import {buildUsageDiagnostic, diagnosticLines} from '@truenine/memory-sync-sdk' + +const INIT_DEPRECATION_MESSAGE + = '`tnmsc init` is deprecated and no longer initializes aindex. Maintain the public target-relative definitions manually under `~/workspace/aindex/public/`.' + +export class InitCommand implements Command { + readonly name = 'init' + + async execute(ctx: CommandContext): Promise { + const {logger} = ctx + logger.warn( + buildUsageDiagnostic({ + code: 'INIT_COMMAND_DEPRECATED', + title: 'The init command is deprecated', + rootCause: diagnosticLines('`tnmsc init` no longer initializes aindex content or project definitions.'), + exactFix: diagnosticLines('Maintain the target-relative definitions manually under `~/workspace/aindex/public/`.'), + possibleFixes: [diagnosticLines('Run `tnmsc help` to find a supported replacement command for your workflow.')], + details: {command: 'init'} + }) + ) + return {success: false, filesAffected: 0, dirsAffected: 0, message: INIT_DEPRECATION_MESSAGE} + } +} diff --git a/cli/src/commands/JsonOutputCommand.ts b/cli/src/commands/JsonOutputCommand.ts new file mode 100644 index 00000000..6d49cea0 --- /dev/null +++ b/cli/src/commands/JsonOutputCommand.ts @@ -0,0 +1,31 @@ +import type {Command, CommandContext, CommandResult, JsonCommandResult} from './Command' +import process from 'node:process' +import {clearBufferedDiagnostics, drainBufferedDiagnostics, partitionBufferedDiagnostics} from '@truenine/memory-sync-sdk' + +export class JsonOutputCommand implements Command { + readonly name: string + + constructor(private readonly inner: Command) { + this.name = `json:${inner.name}` + } + + async execute(ctx: CommandContext): Promise { + clearBufferedDiagnostics() + const result = await this.inner.execute(ctx) + process.stdout.write(`${JSON.stringify(toJsonCommandResult(result, drainBufferedDiagnostics()))}\n`) + return result + } +} + +export function toJsonCommandResult(result: CommandResult, diagnostics = drainBufferedDiagnostics()): JsonCommandResult { + const {warnings, errors} = partitionBufferedDiagnostics(diagnostics) + return { + success: result.success, + filesAffected: result.filesAffected, + dirsAffected: result.dirsAffected, + ...result.message != null && {message: result.message}, + pluginResults: [], + warnings, + errors + } +} diff --git a/cli/src/commands/PluginsCommand.ts b/cli/src/commands/PluginsCommand.ts new file mode 100644 index 00000000..35fd6d0e --- /dev/null +++ b/cli/src/commands/PluginsCommand.ts @@ -0,0 +1,35 @@ +import type {Command, CommandContext, CommandResult, JsonPluginInfo} from './Command' +import process from 'node:process' + +export class PluginsCommand implements Command { + readonly name = 'plugins' + + async execute(ctx: CommandContext): Promise { + const {logger, outputPlugins, userConfigOptions} = ctx + const pluginInfos: JsonPluginInfo[] = [] + + for (const plugin of userConfigOptions.plugins) { + pluginInfos.push({ + name: plugin.name, + kind: 'Output', + description: plugin.name, + dependencies: [...plugin.dependsOn ?? []] + }) + } + + const registeredNames = new Set(pluginInfos.map(plugin => plugin.name)) + for (const plugin of outputPlugins) { + if (registeredNames.has(plugin.name)) continue + pluginInfos.push({ + name: plugin.name, + kind: 'Output', + description: plugin.name, + dependencies: [...plugin.dependsOn ?? []] + }) + } + + process.stdout.write(`${JSON.stringify(pluginInfos)}\n`) + logger.info('plugins listed', {count: pluginInfos.length}) + return {success: true, filesAffected: 0, dirsAffected: 0, message: `Listed ${pluginInfos.length} plugin(s)`} + } +} diff --git a/cli/src/commands/UnknownCommand.ts b/cli/src/commands/UnknownCommand.ts new file mode 100644 index 00000000..c8ec4a05 --- /dev/null +++ b/cli/src/commands/UnknownCommand.ts @@ -0,0 +1,23 @@ +import type {Command, CommandContext, CommandResult} from './Command' +import {buildUsageDiagnostic, diagnosticLines} from '@truenine/memory-sync-sdk' + +export class UnknownCommand implements Command { + readonly name = 'unknown' + + constructor(private readonly unknownCmd: string) {} + + async execute(ctx: CommandContext): Promise { + ctx.logger.error( + buildUsageDiagnostic({ + code: 'UNKNOWN_COMMAND', + title: `Unknown tnmsc command: ${this.unknownCmd}`, + rootCause: diagnosticLines(`tnmsc does not recognize the "${this.unknownCmd}" subcommand.`), + exactFix: diagnosticLines('Run `tnmsc help` and invoke one of the supported commands.'), + possibleFixes: [diagnosticLines('Check the command spelling and remove unsupported aliases or flags.')], + details: {command: this.unknownCmd} + }) + ) + ctx.logger.info('run "tnmsc help" for available commands') + return {success: false, filesAffected: 0, dirsAffected: 0, message: `Unknown command: ${this.unknownCmd}`} + } +} diff --git a/sdk/src/commands/VersionCommand.ts b/cli/src/commands/VersionCommand.ts similarity index 59% rename from sdk/src/commands/VersionCommand.ts rename to cli/src/commands/VersionCommand.ts index 6f03525e..c49ab789 100644 --- a/sdk/src/commands/VersionCommand.ts +++ b/cli/src/commands/VersionCommand.ts @@ -2,28 +2,15 @@ import type {Command, CommandContext, CommandResult} from './Command' const CLI_NAME = 'tnmsc' -/** - * Get CLI version from build-time injected constant. - * Falls back to 'unknown' in development mode. - */ export function getCliVersion(): string { return typeof __CLI_VERSION__ !== 'undefined' ? __CLI_VERSION__ : 'dev' } -/** - * Version command - displays CLI version - */ export class VersionCommand implements Command { readonly name = 'version' async execute(ctx: CommandContext): Promise { ctx.logger.info(`${CLI_NAME} v${getCliVersion()}`) - - return { - success: true, - filesAffected: 0, - dirsAffected: 0, - message: 'Version displayed' - } + return {success: true, filesAffected: 0, dirsAffected: 0, message: 'Version displayed'} } } diff --git a/cli/src/commands/bridge.rs b/cli/src/commands/bridge.rs new file mode 100644 index 00000000..a068e599 --- /dev/null +++ b/cli/src/commands/bridge.rs @@ -0,0 +1,21 @@ +use std::process::ExitCode; + +pub fn execute(json_mode: bool) -> ExitCode { + tnmsc::bridge::node::run_node_command("execute", json_mode, &[]) +} + +pub fn dry_run(json_mode: bool) -> ExitCode { + tnmsc::bridge::node::run_node_command("dry-run", json_mode, &[]) +} + +pub fn clean(json_mode: bool) -> ExitCode { + tnmsc::bridge::node::run_node_command("clean", json_mode, &[]) +} + +pub fn dry_run_clean(json_mode: bool) -> ExitCode { + tnmsc::bridge::node::run_node_command("clean", json_mode, &["--dry-run"]) +} + +pub fn plugins(json_mode: bool) -> ExitCode { + tnmsc::bridge::node::run_node_command("plugins", json_mode, &[]) +} diff --git a/cli/src/commands/config_cmd.rs b/cli/src/commands/config_cmd.rs new file mode 100644 index 00000000..5ac94898 --- /dev/null +++ b/cli/src/commands/config_cmd.rs @@ -0,0 +1,33 @@ +use std::path::Path; +use std::process::ExitCode; + +use tnmsc_logger::create_logger; + +pub fn execute(pairs: &[(String, String)]) -> ExitCode { + let logger = create_logger("config", None); + + for (key, _) in pairs { + if key != "workspaceDir" && key != "logLevel" { + logger.info( + format!( + "Unknown config key was ignored: {key}. Supported keys: workspaceDir, logLevel" + ), + None, + ); + } + } + + match tnmsc::update_global_config_from_pairs(Path::new("."), pairs) { + Ok(config_path) => { + logger.info( + serde_json::Value::String(format!("Config saved to {}", config_path.display())), + None, + ); + ExitCode::SUCCESS + } + Err(error) => { + eprintln!("{error}"); + ExitCode::FAILURE + } + } +} diff --git a/cli/src/commands/config_show.rs b/cli/src/commands/config_show.rs new file mode 100644 index 00000000..4525933e --- /dev/null +++ b/cli/src/commands/config_show.rs @@ -0,0 +1,15 @@ +use std::path::Path; +use std::process::ExitCode; + +pub fn execute() -> ExitCode { + match tnmsc::config_show(Path::new(".")) { + Ok(json) => { + println!("{json}"); + ExitCode::SUCCESS + } + Err(error) => { + eprintln!("{error}"); + ExitCode::FAILURE + } + } +} diff --git a/sdk/src/commands/factories/CleanCommandFactory.ts b/cli/src/commands/factories/CleanCommandFactory.ts similarity index 68% rename from sdk/src/commands/factories/CleanCommandFactory.ts rename to cli/src/commands/factories/CleanCommandFactory.ts index 017d1025..3e92a178 100644 --- a/sdk/src/commands/factories/CleanCommandFactory.ts +++ b/cli/src/commands/factories/CleanCommandFactory.ts @@ -4,17 +4,12 @@ import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {CleanCommand} from '../CleanCommand' import {DryRunCleanCommand} from '../DryRunCleanCommand' -/** - * Factory for creating CleanCommand or DryRunCleanCommand - * Handles 'clean' subcommand with optional --dry-run flag - */ export class CleanCommandFactory implements CommandFactory { canHandle(args: ParsedCliArgs): boolean { return args.subcommand === 'clean' } createCommand(args: ParsedCliArgs): Command { - if (args.dryRun) return new DryRunCleanCommand() - return new CleanCommand() + return args.dryRun ? new DryRunCleanCommand() : new CleanCommand() } } diff --git a/sdk/src/commands/factories/ConfigCommandFactory.ts b/cli/src/commands/factories/ConfigCommandFactory.ts similarity index 67% rename from sdk/src/commands/factories/ConfigCommandFactory.ts rename to cli/src/commands/factories/ConfigCommandFactory.ts index bc7b6fe0..005ea10f 100644 --- a/sdk/src/commands/factories/ConfigCommandFactory.ts +++ b/cli/src/commands/factories/ConfigCommandFactory.ts @@ -4,26 +4,19 @@ import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {ConfigCommand} from '../ConfigCommand' import {ConfigShowCommand} from '../ConfigShowCommand' -/** - * Factory for creating ConfigCommand or ConfigShowCommand - * Handles 'config' subcommand with --show flag or key=value arguments - */ export class ConfigCommandFactory implements CommandFactory { canHandle(args: ParsedCliArgs): boolean { return args.subcommand === 'config' } createCommand(args: ParsedCliArgs): Command { - if (args.showFlag) { // Config --show subcommand - return new ConfigShowCommand() - } + if (args.showFlag) return new ConfigShowCommand() - const parsedPositional: [key: string, value: string][] = [] // Parse positional arguments as key=value pairs + const parsedPositional: [key: string, value: string][] = [] for (const arg of args.positional) { const eqIndex = arg.indexOf('=') if (eqIndex > 0) parsedPositional.push([arg.slice(0, eqIndex), arg.slice(eqIndex + 1)]) } - return new ConfigCommand([...args.setOption, ...parsedPositional]) } } diff --git a/sdk/src/commands/factories/DryRunCommandFactory.ts b/cli/src/commands/factories/DryRunCommandFactory.ts similarity index 73% rename from sdk/src/commands/factories/DryRunCommandFactory.ts rename to cli/src/commands/factories/DryRunCommandFactory.ts index 232901ea..cefc3b6f 100644 --- a/sdk/src/commands/factories/DryRunCommandFactory.ts +++ b/cli/src/commands/factories/DryRunCommandFactory.ts @@ -3,17 +3,12 @@ import type {CommandFactory} from '../CommandFactory' import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {DryRunOutputCommand} from '../DryRunOutputCommand' -/** - * Factory for creating DryRunOutputCommand - * Handles 'dry-run' subcommand - */ export class DryRunCommandFactory implements CommandFactory { canHandle(args: ParsedCliArgs): boolean { return args.subcommand === 'dry-run' } - createCommand(args: ParsedCliArgs): Command { - void args + createCommand(): Command { return new DryRunOutputCommand() } } diff --git a/cli/src/commands/factories/ExecuteCommandFactory.ts b/cli/src/commands/factories/ExecuteCommandFactory.ts new file mode 100644 index 00000000..681b3447 --- /dev/null +++ b/cli/src/commands/factories/ExecuteCommandFactory.ts @@ -0,0 +1,13 @@ +import type {Command} from '../Command' +import type {CommandFactory} from '../CommandFactory' +import {ExecuteCommand} from '../ExecuteCommand' + +export class ExecuteCommandFactory implements CommandFactory { + canHandle(): boolean { + return true + } + + createCommand(): Command { + return new ExecuteCommand() + } +} diff --git a/sdk/src/commands/factories/HelpCommandFactory.ts b/cli/src/commands/factories/HelpCommandFactory.ts similarity index 77% rename from sdk/src/commands/factories/HelpCommandFactory.ts rename to cli/src/commands/factories/HelpCommandFactory.ts index 3b4174a5..7db10b96 100644 --- a/sdk/src/commands/factories/HelpCommandFactory.ts +++ b/cli/src/commands/factories/HelpCommandFactory.ts @@ -4,10 +4,6 @@ import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {FactoryPriority} from '../CommandFactory' import {HelpCommand} from '../HelpCommand' -/** - * Factory for creating HelpCommand - * Handles --help flag and 'help' subcommand - */ export class HelpCommandFactory implements PrioritizedCommandFactory { readonly priority = FactoryPriority.Flags @@ -15,8 +11,7 @@ export class HelpCommandFactory implements PrioritizedCommandFactory { return args.helpFlag || args.subcommand === 'help' } - createCommand(args: ParsedCliArgs): Command { - void args + createCommand(): Command { return new HelpCommand() } } diff --git a/sdk/src/commands/factories/InitCommandFactory.ts b/cli/src/commands/factories/InitCommandFactory.ts similarity index 86% rename from sdk/src/commands/factories/InitCommandFactory.ts rename to cli/src/commands/factories/InitCommandFactory.ts index 71f55fca..afe09f8e 100644 --- a/sdk/src/commands/factories/InitCommandFactory.ts +++ b/cli/src/commands/factories/InitCommandFactory.ts @@ -8,8 +8,7 @@ export class InitCommandFactory implements CommandFactory { return args.subcommand === 'init' } - createCommand(args: ParsedCliArgs): Command { - void args + createCommand(): Command { return new InitCommand() } } diff --git a/sdk/src/commands/factories/PluginsCommandFactory.ts b/cli/src/commands/factories/PluginsCommandFactory.ts similarity index 73% rename from sdk/src/commands/factories/PluginsCommandFactory.ts rename to cli/src/commands/factories/PluginsCommandFactory.ts index 11b25ecb..2d3f87d3 100644 --- a/sdk/src/commands/factories/PluginsCommandFactory.ts +++ b/cli/src/commands/factories/PluginsCommandFactory.ts @@ -3,17 +3,12 @@ import type {CommandFactory} from '../CommandFactory' import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {PluginsCommand} from '../PluginsCommand' -/** - * Factory for creating PluginsCommand - * Handles 'plugins' subcommand - */ export class PluginsCommandFactory implements CommandFactory { canHandle(args: ParsedCliArgs): boolean { return args.subcommand === 'plugins' } - createCommand(args: ParsedCliArgs): Command { - void args + createCommand(): Command { return new PluginsCommand() } } diff --git a/sdk/src/commands/factories/UnknownCommandFactory.ts b/cli/src/commands/factories/UnknownCommandFactory.ts similarity index 72% rename from sdk/src/commands/factories/UnknownCommandFactory.ts rename to cli/src/commands/factories/UnknownCommandFactory.ts index 6c97fb62..bea8f387 100644 --- a/sdk/src/commands/factories/UnknownCommandFactory.ts +++ b/cli/src/commands/factories/UnknownCommandFactory.ts @@ -4,10 +4,6 @@ import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {FactoryPriority} from '../CommandFactory' import {UnknownCommand} from '../UnknownCommand' -/** - * Factory for creating UnknownCommand - * Handles unknown/invalid subcommands - */ export class UnknownCommandFactory implements PrioritizedCommandFactory { readonly priority = FactoryPriority.Unknown @@ -16,7 +12,6 @@ export class UnknownCommandFactory implements PrioritizedCommandFactory { } createCommand(args: ParsedCliArgs): Command { - if (args.unknownCommand == null) return new UnknownCommand('') - return new UnknownCommand(args.unknownCommand) + return new UnknownCommand(args.unknownCommand ?? '') } } diff --git a/sdk/src/commands/factories/VersionCommandFactory.ts b/cli/src/commands/factories/VersionCommandFactory.ts similarity index 77% rename from sdk/src/commands/factories/VersionCommandFactory.ts rename to cli/src/commands/factories/VersionCommandFactory.ts index 95dbc123..f0deb6d1 100644 --- a/sdk/src/commands/factories/VersionCommandFactory.ts +++ b/cli/src/commands/factories/VersionCommandFactory.ts @@ -4,10 +4,6 @@ import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {FactoryPriority} from '../CommandFactory' import {VersionCommand} from '../VersionCommand' -/** - * Factory for creating VersionCommand - * Handles --version flag and 'version' subcommand - */ export class VersionCommandFactory implements PrioritizedCommandFactory { readonly priority = FactoryPriority.Flags @@ -15,8 +11,7 @@ export class VersionCommandFactory implements PrioritizedCommandFactory { return args.versionFlag || args.subcommand === 'version' } - createCommand(args: ParsedCliArgs): Command { - void args + createCommand(): Command { return new VersionCommand() } } diff --git a/sdk/src/commands/help.rs b/cli/src/commands/help.rs similarity index 100% rename from sdk/src/commands/help.rs rename to cli/src/commands/help.rs diff --git a/sdk/src/commands/mod.rs b/cli/src/commands/mod.rs similarity index 100% rename from sdk/src/commands/mod.rs rename to cli/src/commands/mod.rs diff --git a/sdk/src/commands/version.rs b/cli/src/commands/version.rs similarity index 64% rename from sdk/src/commands/version.rs rename to cli/src/commands/version.rs index 8321606a..b0cf860b 100644 --- a/sdk/src/commands/version.rs +++ b/cli/src/commands/version.rs @@ -1,6 +1,6 @@ use std::process::ExitCode; pub fn execute() -> ExitCode { - println!("{}", env!("CARGO_PKG_VERSION")); + println!("{}", tnmsc::version()); ExitCode::SUCCESS } diff --git a/cli/src/index.test.ts b/cli/src/index.test.ts index c14dfa2f..9966cb7d 100644 --- a/cli/src/index.test.ts +++ b/cli/src/index.test.ts @@ -1,16 +1,12 @@ -import { - createDefaultPluginConfig, - listPrompts, - runCli -} from '@truenine/memory-sync-sdk' +import {listPrompts} from '@truenine/memory-sync-sdk' import {describe, expect, it} from 'vitest' import * as cliShell from './index' describe('cli shell entrypoint', () => { - it('re-exports the sdk surface without executing the CLI runtime', async () => { - expect(cliShell.runCli).toBe(runCli) - expect(cliShell.createDefaultPluginConfig).toBe(createDefaultPluginConfig) + it('re-exports sdk library APIs while keeping local shell exports', async () => { + expect(typeof cliShell.runCli).toBe('function') + expect(typeof cliShell.createDefaultPluginConfig).toBe('function') expect(cliShell.listPrompts).toBe(listPrompts) }) }) diff --git a/cli/src/index.ts b/cli/src/index.ts index 31428d56..a99ce905 100644 --- a/cli/src/index.ts +++ b/cli/src/index.ts @@ -3,8 +3,10 @@ import {existsSync, realpathSync} from 'node:fs' import process from 'node:process' import {fileURLToPath} from 'node:url' -import {runCli} from '@truenine/memory-sync-sdk' +import {runCli} from './cli-runtime' +export * from './cli-runtime' +export * from './plugin.config' export * from '@truenine/memory-sync-sdk' function isCliEntrypoint(argv: readonly string[] = process.argv): boolean { @@ -13,8 +15,7 @@ function isCliEntrypoint(argv: readonly string[] = process.argv): boolean { try { return realpathSync(entryPath) === realpathSync(fileURLToPath(import.meta.url)) - } - catch { + } catch { return false } } diff --git a/cli/src/main.rs b/cli/src/main.rs index eccb7bc9..5233764d 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -4,6 +4,7 @@ //! Bridge commands (Node.js): execute, dry-run, clean, plugins mod cli; +mod commands; use std::process::ExitCode; @@ -27,14 +28,14 @@ fn main() -> ExitCode { let command = resolve_command(&cli); match command { - ResolvedCommand::Help => tnmsc::commands::help::execute(), - ResolvedCommand::Version => tnmsc::commands::version::execute(), - ResolvedCommand::Config(pairs) => tnmsc::commands::config_cmd::execute(&pairs), - ResolvedCommand::ConfigShow => tnmsc::commands::config_show::execute(), - ResolvedCommand::Execute => tnmsc::commands::bridge::execute(json_mode), - ResolvedCommand::DryRun => tnmsc::commands::bridge::dry_run(json_mode), - ResolvedCommand::Clean => tnmsc::commands::bridge::clean(json_mode), - ResolvedCommand::DryRunClean => tnmsc::commands::bridge::dry_run_clean(json_mode), - ResolvedCommand::Plugins => tnmsc::commands::bridge::plugins(json_mode), + ResolvedCommand::Help => commands::help::execute(), + ResolvedCommand::Version => commands::version::execute(), + ResolvedCommand::Config(pairs) => commands::config_cmd::execute(&pairs), + ResolvedCommand::ConfigShow => commands::config_show::execute(), + ResolvedCommand::Execute => commands::bridge::execute(json_mode), + ResolvedCommand::DryRun => commands::bridge::dry_run(json_mode), + ResolvedCommand::Clean => commands::bridge::clean(json_mode), + ResolvedCommand::DryRunClean => commands::bridge::dry_run_clean(json_mode), + ResolvedCommand::Plugins => commands::bridge::plugins(json_mode), } } diff --git a/sdk/src/pipeline/CliArgumentParser.test.ts b/cli/src/pipeline/CliArgumentParser.test.ts similarity index 100% rename from sdk/src/pipeline/CliArgumentParser.test.ts rename to cli/src/pipeline/CliArgumentParser.test.ts diff --git a/sdk/src/pipeline/CliArgumentParser.ts b/cli/src/pipeline/CliArgumentParser.ts similarity index 60% rename from sdk/src/pipeline/CliArgumentParser.ts rename to cli/src/pipeline/CliArgumentParser.ts index ac5c1b60..0ead16dd 100644 --- a/sdk/src/pipeline/CliArgumentParser.ts +++ b/cli/src/pipeline/CliArgumentParser.ts @@ -1,10 +1,3 @@ -/** - * CLI Argument Parser Module - * Handles extraction and parsing of command-line arguments - * - * Refactored to use Command Factory pattern for command creation - */ - import type {Command} from '@/commands/Command' import {FactoryPriority} from '@/commands/CommandFactory' import {CommandRegistry} from '@/commands/CommandRegistry' @@ -18,19 +11,9 @@ import {PluginsCommandFactory} from '@/commands/factories/PluginsCommandFactory' import {UnknownCommandFactory} from '@/commands/factories/UnknownCommandFactory' import {VersionCommandFactory} from '@/commands/factories/VersionCommandFactory' -/** - * Valid subcommands for the CLI - */ export type Subcommand = 'help' | 'version' | 'init' | 'dry-run' | 'clean' | 'config' | 'plugins' - -/** - * Valid log levels for the CLI - */ export type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' -/** - * Command line argument parsing result - */ export interface ParsedCliArgs { readonly subcommand: Subcommand | undefined readonly helpFlag: boolean @@ -45,14 +28,7 @@ export interface ParsedCliArgs { readonly unknown: readonly string[] } -/** - * Valid subcommands set for quick lookup - */ const VALID_SUBCOMMANDS: ReadonlySet = new Set(['help', 'version', 'init', 'dry-run', 'clean', 'config', 'plugins']) - -/** - * Log level flags mapping - */ const LOG_LEVEL_FLAGS: ReadonlyMap = new Map([ ['--trace', 'trace'], ['--debug', 'debug'], @@ -60,10 +36,6 @@ const LOG_LEVEL_FLAGS: ReadonlyMap = new Map([ ['--warn', 'warn'], ['--error', 'error'] ]) - -/** - * Log level priority map (lower number = more verbose) - */ const LOG_LEVEL_PRIORITY: ReadonlyMap = new Map([ ['trace', 0], ['debug', 1], @@ -72,41 +44,25 @@ const LOG_LEVEL_PRIORITY: ReadonlyMap = new Map([ ['error', 4] ]) -/** - * Extract actual user arguments from argv - * Compatible with various execution scenarios: npx, node, tsx, direct execution, etc. - */ export function extractUserArgs(argv: readonly string[]): string[] { const args = [...argv] - - const first = args[0] // Skip runtime path (node, bun, deno, etc.) + const first = args[0] if (first != null && isRuntimeExecutable(first)) args.shift() - - const second = args[0] // Skip script path or npx package name + const second = args[0] if (second != null && isScriptOrPackage(second)) args.shift() - return args } -/** - * Determine if it is a runtime executable - */ function isRuntimeExecutable(arg: string): boolean { const runtimes = ['node', 'nodejs', 'bun', 'deno', 'tsx', 'ts-node', 'npx', 'pnpx', 'yarn', 'pnpm'] const normalized = arg.toLowerCase().replaceAll('\\', '/') - return runtimes.some(rt => { - const pattern = new RegExp(`(?:^|/)${rt}(?:\\.exe|\\.cmd|\\.ps1)?$`, 'i') - return pattern.test(normalized) || normalized === rt - }) + return runtimes.some(runtime => new RegExp(`(?:^|/)${runtime}(?:\\.exe|\\.cmd|\\.ps1)?$`, 'i').test(normalized) || normalized === runtime) } -/** - * Determine if it is a script file or package name - */ function isScriptOrPackage(arg: string): boolean { - if (/\.(?:m?[jt]s|cjs)$/.test(arg)) return true // Script file - if (/[/\\]/.test(arg) && !arg.startsWith('-')) return true // File path containing separators - return /^(?:@[\w-]+\/)?[\w-]+$/.test(arg) && !arg.startsWith('-') // npx executed package name + if (/\.(?:m?[jt]s|cjs)$/u.test(arg)) return true + if (/[/\\]/u.test(arg) && !arg.startsWith('-')) return true + return /^(?:@[\w-]+\/)?[\w-]+$/u.test(arg) && !arg.startsWith('-') } function pickMoreVerbose(current: LogLevel | undefined, candidate: LogLevel): LogLevel { @@ -116,9 +72,6 @@ function pickMoreVerbose(current: LogLevel | undefined, candidate: LogLevel): Lo return candidatePriority < currentPriority ? candidate : current } -/** - * Parse command line arguments into structured result - */ export function parseArgs(args: readonly string[]): ParsedCliArgs { const result: { subcommand: Subcommand | undefined @@ -147,119 +100,118 @@ export function parseArgs(args: readonly string[]): ParsedCliArgs { } let firstPositionalProcessed = false - for (let i = 0; i < args.length; i++) { const arg = args[i] if (arg == null) continue - - if (arg === '--') { // Handle -- separator: all following args are positional - result.positional.push(...args.slice(i + 1).filter((a): a is string => a != null)) + if (arg === '--') { + result.positional.push(...args.slice(i + 1).filter((value): value is string => value != null)) break } - if (arg.startsWith('--')) { // Long options + if (arg.startsWith('--')) { const parts = arg.split('=') const key = parts[0] ?? '' - - const logLevel = LOG_LEVEL_FLAGS.get(key) // Check log level flags + const logLevel = LOG_LEVEL_FLAGS.get(key) if (logLevel != null) { result.logLevel = pickMoreVerbose(result.logLevel, logLevel) continue } switch (key) { - case '--help': result.helpFlag = true; break - case '--version': result.versionFlag = true; break - case '--dry-run': result.dryRun = true; break - case '--json': result.jsonFlag = true; break - case '--show': result.showFlag = true; break - case '--set': - if (parts.length > 1) { // Parse --set key=value from next arg or from = syntax + case '--help': + result.helpFlag = true + break + case '--version': + result.versionFlag = true + break + case '--dry-run': + result.dryRun = true + break + case '--json': + result.jsonFlag = true + break + case '--show': + result.showFlag = true + break + case '--set': { + if (parts.length > 1) { const keyValue = parts.slice(1).join('=') const eqIndex = keyValue.indexOf('=') if (eqIndex > 0) result.setOption.push([keyValue.slice(0, eqIndex), keyValue.slice(eqIndex + 1)]) } else { - const nextArg = args[i + 1] // Next arg is the value + const nextArg = args[i + 1] if (nextArg != null) { const eqIndex = nextArg.indexOf('=') if (eqIndex > 0) { result.setOption.push([nextArg.slice(0, eqIndex), nextArg.slice(eqIndex + 1)]) - i++ // Skip next arg + i++ } } } break - default: result.unknown.push(arg) + } + default: + result.unknown.push(arg) } continue } - if (arg.startsWith('-') && arg.length > 1) { // Short options - const flags = arg.slice(1) - for (const flag of flags) { + if (arg.startsWith('-') && arg.length > 1) { + for (const flag of arg.slice(1)) { switch (flag) { - case 'h': result.helpFlag = true; break - case 'v': result.versionFlag = true; break - case 'n': result.dryRun = true; break - case 'j': result.jsonFlag = true; break - default: result.unknown.push(`-${flag}`) + case 'h': + result.helpFlag = true + break + case 'v': + result.versionFlag = true + break + case 'n': + result.dryRun = true + break + case 'j': + result.jsonFlag = true + break + default: + result.unknown.push(`-${flag}`) } } continue } - if (!firstPositionalProcessed) { // First positional argument: check if it's a subcommand + if (!firstPositionalProcessed) { firstPositionalProcessed = true if (VALID_SUBCOMMANDS.has(arg)) result.subcommand = arg as Subcommand - else { - result.unknownCommand = arg // Unknown first positional is captured as unknownCommand - } + else result.unknownCommand = arg continue } - result.positional.push(arg) // Remaining positional arguments + result.positional.push(arg) } return result } -/** - * Singleton instance of the command registry - * Lazy-loaded to ensure factories are only created when needed - */ -let commandRegistry: ReturnType | undefined +let commandRegistry: CommandRegistry | undefined function createDefaultCommandRegistry(): CommandRegistry { const registry = new CommandRegistry() - - registry.register(new VersionCommandFactory()) // High priority: flag-based commands + registry.register(new VersionCommandFactory()) registry.register(new HelpCommandFactory()) registry.register(new UnknownCommandFactory()) - registry.registerWithPriority(new InitCommandFactory(), FactoryPriority.Subcommand) registry.registerWithPriority(new DryRunCommandFactory(), FactoryPriority.Subcommand) registry.registerWithPriority(new CleanCommandFactory(), FactoryPriority.Subcommand) registry.registerWithPriority(new PluginsCommandFactory(), FactoryPriority.Subcommand) registry.registerWithPriority(new ConfigCommandFactory(), FactoryPriority.Subcommand) - - registry.registerWithPriority(new ExecuteCommandFactory(), FactoryPriority.Subcommand) // Lowest priority: default/catch-all command - + registry.registerWithPriority(new ExecuteCommandFactory(), FactoryPriority.Subcommand) return registry } -/** - * Get or create the command registry singleton - */ -function getCommandRegistry(): ReturnType { +function getCommandRegistry(): CommandRegistry { commandRegistry ??= createDefaultCommandRegistry() return commandRegistry } -/** - * Resolve command from parsed CLI arguments using factory pattern - * Delegates command creation to registered factories based on priority - */ export function resolveCommand(args: ParsedCliArgs): Command { - const registry = getCommandRegistry() - return registry.resolve(args) + return getCommandRegistry().resolve(args) } diff --git a/sdk/src/plugin-runtime.ts b/cli/src/plugin-runtime.ts similarity index 53% rename from sdk/src/plugin-runtime.ts rename to cli/src/plugin-runtime.ts index c23b0cf8..2d19eb2a 100644 --- a/sdk/src/plugin-runtime.ts +++ b/cli/src/plugin-runtime.ts @@ -1,79 +1,78 @@ -import type {OutputCleanContext, OutputWriteContext} from './plugins/plugin-core' -/** - * Plugin Runtime Entry Point - * - * Streamlined entry for the Rust CLI binary to spawn via Node.js. - * Accepts a subcommand and flags, executes the plugin pipeline, - * and outputs results to stdout. - * - * Usage: node plugin-runtime.mjs [--json] [--dry-run] - * - * Subcommands: execute, dry-run, clean, plugins - */ +import type {OutputCleanContext, OutputWriteContext, RuntimeCommand} from '@truenine/memory-sync-sdk' import type {Command, CommandContext} from '@/commands/Command' -import type {PipelineConfig} from '@/config' import process from 'node:process' +import { + buildUnhandledExceptionDiagnostic, + createLogger, + discoverOutputRuntimeTargets, + drainBufferedDiagnostics, + setGlobalLogLevel +} from '@truenine/memory-sync-sdk' import {CleanCommand} from '@/commands/CleanCommand' import {DryRunCleanCommand} from '@/commands/DryRunCleanCommand' import {DryRunOutputCommand} from '@/commands/DryRunOutputCommand' import {ExecuteCommand} from '@/commands/ExecuteCommand' import {JsonOutputCommand, toJsonCommandResult} from '@/commands/JsonOutputCommand' import {PluginsCommand} from '@/commands/PluginsCommand' -import {buildUnhandledExceptionDiagnostic} from '@/diagnostics' -import {discoverOutputRuntimeTargets} from '@/pipeline/OutputRuntimeTargets' import {createDefaultPluginConfig} from './plugin.config' -import {createLogger, drainBufferedDiagnostics, setGlobalLogLevel} from './plugins/plugin-core' -/** - * Parse runtime arguments. - * Expected: node plugin-runtime.mjs [--json] [--dry-run] - */ -function parseRuntimeArgs(argv: string[]): {subcommand: string, json: boolean, dryRun: boolean} { - const args = argv.slice(2) // Skip node and script path - let subcommand = 'execute' +function parseRuntimeArgs(argv: string[]): {subcommand: RuntimeCommand, json: boolean, dryRun: boolean} { + const args = argv.slice(2) + let subcommand: RuntimeCommand = 'execute' let json = false let dryRun = false - for (const arg of args) { if (arg === '--json' || arg === '-j') json = true else if (arg === '--dry-run' || arg === '-n') dryRun = true - else if (!arg.startsWith('-')) subcommand = arg + else if (!arg.startsWith('-')) { + subcommand = arg === 'plugins' || arg === 'clean' || arg === 'dry-run' ? arg : 'execute' + } } - return {subcommand, json, dryRun} } -/** - * Resolve command from subcommand string. - */ -function resolveRuntimeCommand(subcommand: string, dryRun: boolean): Command { +function resolveRuntimeCommand(subcommand: RuntimeCommand, dryRun: boolean): Command { switch (subcommand) { - case 'execute': return new ExecuteCommand() - case 'dry-run': return new DryRunOutputCommand() - case 'clean': return dryRun ? new DryRunCleanCommand() : new CleanCommand() - case 'plugins': return new PluginsCommand() - default: return new ExecuteCommand() + case 'execute': + return new ExecuteCommand() + case 'dry-run': + return new DryRunOutputCommand() + case 'clean': + return dryRun ? new DryRunCleanCommand() : new CleanCommand() + case 'plugins': + return new PluginsCommand() } } +function writeJsonFailure(error: unknown): void { + const logger = createLogger('plugin-runtime', 'silent') + logger.error(buildUnhandledExceptionDiagnostic('plugin-runtime', error)) + process.stdout.write( + `${JSON.stringify( + toJsonCommandResult( + { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: error instanceof Error ? error.message : String(error) + }, + drainBufferedDiagnostics() + ) + )}\n` + ) +} + async function main(): Promise { const {subcommand, json, dryRun} = parseRuntimeArgs(process.argv) - if (json) setGlobalLogLevel('silent') - const userPluginConfig: PipelineConfig = await createDefaultPluginConfig(process.argv) - + const userPluginConfig = await createDefaultPluginConfig(process.argv, subcommand) let command = resolveRuntimeCommand(subcommand, dryRun) - - if (json) { - const selfJsonCommands = new Set(['plugins']) - if (!selfJsonCommands.has(command.name)) command = new JsonOutputCommand(command) - } + if (json && !new Set(['plugins']).has(command.name)) command = new JsonOutputCommand(command) const {context, outputPlugins, userConfigOptions} = userPluginConfig const logger = createLogger('PluginRuntime') const runtimeTargets = discoverOutputRuntimeTargets(logger) - const createCleanContext = (dry: boolean): OutputCleanContext => ({ logger, collectedOutputContext: context, @@ -81,7 +80,6 @@ async function main(): Promise { runtimeTargets, dryRun: dry }) - const createWriteContext = (dry: boolean): OutputWriteContext => ({ logger, collectedOutputContext: context, @@ -90,7 +88,6 @@ async function main(): Promise { dryRun: dry, registeredPluginNames: Array.from(outputPlugins, plugin => plugin.name) }) - const commandCtx: CommandContext = { logger, outputPlugins: [...outputPlugins], @@ -99,30 +96,17 @@ async function main(): Promise { createCleanContext, createWriteContext } - const result = await command.execute(commandCtx) if (!result.success) process.exit(1) } -function writeJsonFailure(error: unknown): void { - const errorMessage = error instanceof Error ? error.message : String(error) - const logger = createLogger('plugin-runtime', 'silent') - logger.error(buildUnhandledExceptionDiagnostic('plugin-runtime', error)) - process.stdout.write(`${JSON.stringify(toJsonCommandResult({ - success: false, - filesAffected: 0, - dirsAffected: 0, - message: errorMessage - }, drainBufferedDiagnostics()))}\n`) -} - -main().catch((e: unknown) => { +main().catch(error => { const {json} = parseRuntimeArgs(process.argv) if (json) { - writeJsonFailure(e) + writeJsonFailure(error) process.exit(1) } const logger = createLogger('plugin-runtime', 'error') - logger.error(buildUnhandledExceptionDiagnostic('plugin-runtime', e)) + logger.error(buildUnhandledExceptionDiagnostic('plugin-runtime', error)) process.exit(1) }) diff --git a/cli/src/plugin.config.ts b/cli/src/plugin.config.ts new file mode 100644 index 00000000..7820f771 --- /dev/null +++ b/cli/src/plugin.config.ts @@ -0,0 +1,70 @@ +import type {PipelineConfig, RuntimeCommand} from '@truenine/memory-sync-sdk' +import process from 'node:process' +import { + AgentsOutputPlugin, + ClaudeCodeCLIOutputPlugin, + CodexCLIOutputPlugin, + CursorOutputPlugin, + defineConfig, + DroidCLIOutputPlugin, + EditorConfigOutputPlugin, + GeminiCLIOutputPlugin, + GenericSkillsOutputPlugin, + GitExcludeOutputPlugin, + JetBrainsAIAssistantCodexOutputPlugin, + JetBrainsIDECodeStyleConfigOutputPlugin, + OpencodeCLIOutputPlugin, + QoderIDEPluginOutputPlugin, + ReadmeMdConfigFileOutputPlugin, + TraeCNIDEOutputPlugin, + TraeIDEOutputPlugin, + VisualStudioCodeIDEConfigOutputPlugin, + WarpIDEOutputPlugin, + WindsurfOutputPlugin, + ZedIDEConfigOutputPlugin +} from '@truenine/memory-sync-sdk' + +export function resolveRuntimeCommandFromArgv(argv: readonly string[] = process.argv): RuntimeCommand { + const args = argv.filter((arg): arg is string => arg != null) + const userArgs = args.slice(2) + const subcommand = userArgs.find(arg => !arg.startsWith('-')) + if (subcommand === 'plugins') return 'plugins' + if (subcommand === 'clean') return 'clean' + if (subcommand === 'dry-run' || userArgs.includes('--dry-run') || userArgs.includes('-n')) return 'dry-run' + return 'execute' +} + +export async function createDefaultPluginConfig( + argv: readonly string[] = process.argv, + runtimeCommand: RuntimeCommand = resolveRuntimeCommandFromArgv(argv) +): Promise { + return defineConfig({ + runtimeCommand, + pluginOptions: { + plugins: [ + new AgentsOutputPlugin(), + new ClaudeCodeCLIOutputPlugin(), + new CodexCLIOutputPlugin(), + new JetBrainsAIAssistantCodexOutputPlugin(), + new DroidCLIOutputPlugin(), + new GeminiCLIOutputPlugin(), + new GenericSkillsOutputPlugin(), + new OpencodeCLIOutputPlugin(), + new QoderIDEPluginOutputPlugin(), + new TraeIDEOutputPlugin(), + new TraeCNIDEOutputPlugin(), + new WarpIDEOutputPlugin(), + new WindsurfOutputPlugin(), + new CursorOutputPlugin(), + new GitExcludeOutputPlugin(), + new JetBrainsIDECodeStyleConfigOutputPlugin(), + new EditorConfigOutputPlugin(), + new VisualStudioCodeIDEConfigOutputPlugin(), + new ZedIDEConfigOutputPlugin(), + new ReadmeMdConfigFileOutputPlugin() + ] + } + }) +} + +export default createDefaultPluginConfig diff --git a/sdk/src/script-runtime-worker.ts b/cli/src/script-runtime-worker.ts similarity index 74% rename from sdk/src/script-runtime-worker.ts rename to cli/src/script-runtime-worker.ts index ae6854a6..d29cfaed 100644 --- a/sdk/src/script-runtime-worker.ts +++ b/cli/src/script-runtime-worker.ts @@ -4,8 +4,8 @@ import {resolvePublicPathUnchecked} from '@truenine/script-runtime' async function main(): Promise { const [, , filePath, ctxJsonPath, logicalPath] = process.argv - if (filePath == null || ctxJsonPath == null || logicalPath == null) throw new Error('Usage: script-runtime-worker ') - + if (filePath == null || ctxJsonPath == null || logicalPath == null) + { throw new Error('Usage: script-runtime-worker ') } const ctxJson = readFileSync(ctxJsonPath, 'utf8') const ctx = JSON.parse(ctxJson) as Parameters[1] const result = await resolvePublicPathUnchecked(filePath, ctx, logicalPath) @@ -13,7 +13,6 @@ async function main(): Promise { } main().catch((error: unknown) => { - const message = error instanceof Error ? error.message : String(error) - process.stderr.write(`${message}\n`) + process.stderr.write(`${error instanceof Error ? error.message : String(error)}\n`) process.exit(1) }) diff --git a/cli/tsconfig.json b/cli/tsconfig.json index 9006c87e..6dc3fc16 100644 --- a/cli/tsconfig.json +++ b/cli/tsconfig.json @@ -11,7 +11,11 @@ "module": "ESNext", "moduleResolution": "Bundler", "paths": { + "@sdk": ["../sdk/src/index.ts"], + "@sdk/*": ["../sdk/src/*"], "@/*": ["./src/*"], + "@truenine/script-runtime": ["../libraries/script-runtime/dist/index.d.mts"], + "@truenine/script-runtime/*": ["../libraries/script-runtime/dist/*"], "@truenine/desk-paths": ["./src/core/desk-paths.ts"], "@truenine/desk-paths/*": ["./src/core/desk-paths/*"], "@truenine/plugin-output-shared": ["./src/plugins/plugin-output-shared/index.ts"], diff --git a/cli/tsdown.config.ts b/cli/tsdown.config.ts index 675f8756..7d43e4b7 100644 --- a/cli/tsdown.config.ts +++ b/cli/tsdown.config.ts @@ -1,6 +1,6 @@ import {defineConfig} from 'tsdown' -const noExternalDeps = ['@truenine/memory-sync-sdk'] +const alwaysBundleDeps = ['@truenine/memory-sync-sdk'] export default defineConfig([ { @@ -9,9 +9,9 @@ export default defineConfig([ sourcemap: false, unbundle: false, deps: { + alwaysBundle: alwaysBundleDeps, onlyBundle: false }, - noExternal: noExternalDeps, format: ['esm'], minify: true, dts: {sourcemap: false}, @@ -21,9 +21,33 @@ export default defineConfig([ entry: ['./src/globals.ts'], platform: 'node', sourcemap: false, - noExternal: noExternalDeps, + deps: { + alwaysBundle: alwaysBundleDeps + }, format: ['esm'], minify: false, dts: {sourcemap: false} + }, + { + entry: ['./src/plugin-runtime.ts'], + platform: 'node', + sourcemap: false, + deps: { + alwaysBundle: alwaysBundleDeps + }, + format: ['esm'], + minify: true, + dts: false + }, + { + entry: ['./src/script-runtime-worker.ts'], + platform: 'node', + sourcemap: false, + deps: { + alwaysBundle: alwaysBundleDeps + }, + format: ['esm'], + minify: true, + dts: false } ]) diff --git a/cli/vite.config.ts b/cli/vite.config.ts index 7830f95c..6d2f1c26 100644 --- a/cli/vite.config.ts +++ b/cli/vite.config.ts @@ -1,3 +1,10 @@ +import {fileURLToPath} from 'node:url' import {defineConfig} from 'vite' -export default defineConfig({}) +export default defineConfig({ + resolve: { + alias: { + '@': fileURLToPath(new URL('./src', import.meta.url)) + } + } +}) diff --git a/sdk/build.rs b/sdk/build.rs index 807864da..5a81dcc9 100644 --- a/sdk/build.rs +++ b/sdk/build.rs @@ -15,12 +15,12 @@ fn main() { // Try multiple possible locations for plugin-runtime.mjs let possible_sources = vec![ - // Already built in sdk/dist - PathBuf::from("dist/plugin-runtime.mjs"), + // Already built in cli/dist + PathBuf::from("../cli/dist/plugin-runtime.mjs"), // From repo root - PathBuf::from("sdk/dist/plugin-runtime.mjs"), - // CI workspace path (when building from repo root) - PathBuf::from("../sdk/dist/plugin-runtime.mjs"), + PathBuf::from("cli/dist/plugin-runtime.mjs"), + // Local fallback when building from cli package directory + PathBuf::from("dist/plugin-runtime.mjs"), ]; let mut found = false; @@ -36,7 +36,7 @@ fn main() { if !found { panic!( "plugin-runtime.mjs not found for embedded-runtime feature. \ - Please build it first with: pnpm -F @truenine/memory-sync-sdk exec tsdown \ + Please build it first with: pnpm -F @truenine/memory-sync-cli run build:shell \ Searched paths: {:?}", possible_sources ); diff --git a/sdk/package.json b/sdk/package.json index 3c4eda81..d6267939 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -70,15 +70,15 @@ "typecheck:run": "tsc --noEmit -p tsconfig.lib.json" }, "dependencies": { + "@truenine/logger": "workspace:*", + "@truenine/md-compiler": "workspace:*", + "@truenine/script-runtime": "workspace:*", "json5": "catalog:", "yaml": "catalog:", "zod": "catalog:" }, "devDependencies": { "@clack/prompts": "catalog:", - "@truenine/logger": "workspace:*", - "@truenine/md-compiler": "workspace:*", - "@truenine/script-runtime": "workspace:*", "@types/fs-extra": "catalog:", "@types/picomatch": "catalog:", "@vitest/coverage-v8": "catalog:", diff --git a/sdk/src/PluginPipeline.test.ts b/sdk/src/PluginPipeline.test.ts deleted file mode 100644 index 27d12a4f..00000000 --- a/sdk/src/PluginPipeline.test.ts +++ /dev/null @@ -1,60 +0,0 @@ -import type {PipelineConfig} from './config' -import type {OutputPlugin} from './plugins/plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from './config' -import {PluginPipeline} from './PluginPipeline' -import {createLogger, FilePathKind, PluginKind} from './plugins/plugin-core' - -describe('plugin pipeline output contexts', () => { - it('passes user config options into write contexts', async () => { - const tempDir = path.resolve('tmp/plugin-pipeline-frontmatter') - fs.rmSync(tempDir, {recursive: true, force: true}) - fs.mkdirSync(tempDir, {recursive: true}) - - const outputPath = path.join(tempDir, 'frontmatter.txt') - let seenBlankLineAfter: boolean | undefined - - const plugin: OutputPlugin = { - type: PluginKind.Output, - name: 'CaptureOutputPlugin', - log: createLogger('CaptureOutputPlugin', 'error'), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles(ctx) { - seenBlankLineAfter = ctx.pluginOptions?.frontMatter?.blankLineAfter - return [{path: outputPath, source: 'capture'}] - }, - async convertContent(_declaration, ctx) { - return String(ctx.pluginOptions?.frontMatter?.blankLineAfter) - } - } - - const config: PipelineConfig = { - context: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: tempDir, - getDirectoryName: () => path.basename(tempDir) - }, - projects: [] - } - }, - outputPlugins: [plugin], - userConfigOptions: mergeConfig({ - workspaceDir: tempDir, - frontMatter: { - blankLineAfter: false - } - }) - } - - const result = await new PluginPipeline('node', 'tnmsc').run(config) - - expect(result.success).toBe(true) - expect(seenBlankLineAfter).toBe(false) - expect(fs.readFileSync(outputPath, 'utf8')).toBe('false') - }) -}) diff --git a/sdk/src/PluginPipeline.ts b/sdk/src/PluginPipeline.ts deleted file mode 100644 index 652952ba..00000000 --- a/sdk/src/PluginPipeline.ts +++ /dev/null @@ -1,101 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCollectedContext, OutputPlugin, OutputRuntimeTargets, OutputWriteContext, PluginOptions} from './plugins/plugin-core' -import type {Command, CommandContext, CommandResult} from '@/commands/Command' -import type {PipelineConfig} from '@/config' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {JsonOutputCommand} from '@/commands/JsonOutputCommand' -import {extractUserArgs, parseArgs, resolveCommand} from '@/pipeline/CliArgumentParser' -import {discoverOutputRuntimeTargets} from '@/pipeline/OutputRuntimeTargets' -import {createLogger, setGlobalLogLevel} from './plugins/plugin-core' - -/** - * Plugin Pipeline - Orchestrates plugin execution - * - * This class has been refactored to use modular components: - * - CliArgumentParser: CLI argument parsing (moved to @/pipeline) - * - DependencyResolver: dependency ordering (moved to @/pipeline) - * - ContextMerger: Context merging (moved to @/pipeline) - */ -export class PluginPipeline { - private readonly logger: ILogger - readonly args: ParsedCliArgs - private outputPlugins: OutputPlugin[] = [] - private runtimeTargets?: OutputRuntimeTargets - - constructor(...cmdArgs: (string | undefined)[]) { - const filtered = cmdArgs.filter((arg): arg is string => arg != null) - const userArgs = extractUserArgs(filtered) - this.args = parseArgs(userArgs) - - const resolvedLogLevel = this.args.logLevel // Resolve log level from parsed args and set globally - if (resolvedLogLevel != null) setGlobalLogLevel(resolvedLogLevel) - this.logger = createLogger('PluginPipeline', resolvedLogLevel) - this.logger.debug('initialized', {args: this.args}) - } - - registerOutputPlugins(plugins: OutputPlugin[]): this { - this.outputPlugins.push(...plugins) - return this - } - - async run(config: PipelineConfig): Promise { - const {context, outputPlugins, userConfigOptions} = config - this.registerOutputPlugins([...outputPlugins]) - - let command: Command = resolveCommand(this.args) - - if (this.args.jsonFlag) { - setGlobalLogLevel('silent') // Suppress all console logging in JSON mode - - const selfJsonCommands = new Set(['config-show', 'plugins']) // only need log suppression, not JsonOutputCommand wrapping // Commands that handle their own JSON output (config --show, plugins) - if (!selfJsonCommands.has(command.name)) command = new JsonOutputCommand(command) - } - - const commandCtx = this.createCommandContext(context, userConfigOptions) - return command.execute(commandCtx) - } - - private createCommandContext(ctx: OutputCollectedContext, userConfigOptions: Required): CommandContext { - return { - logger: this.logger, - outputPlugins: this.outputPlugins, - collectedOutputContext: ctx, - userConfigOptions, - createCleanContext: (dryRun: boolean) => this.createCleanContext(ctx, userConfigOptions, dryRun), - createWriteContext: (dryRun: boolean) => this.createWriteContext(ctx, userConfigOptions, dryRun) - } - } - - private createCleanContext( - ctx: OutputCollectedContext, - userConfigOptions: Required, - dryRun: boolean - ): OutputCleanContext { - return { - logger: this.logger, - collectedOutputContext: ctx, - pluginOptions: userConfigOptions, - runtimeTargets: this.getRuntimeTargets(), - dryRun - } - } - - private createWriteContext( - ctx: OutputCollectedContext, - userConfigOptions: Required, - dryRun: boolean - ): OutputWriteContext { - return { - logger: this.logger, - collectedOutputContext: ctx, - pluginOptions: userConfigOptions, - runtimeTargets: this.getRuntimeTargets(), - dryRun, - registeredPluginNames: this.outputPlugins.map(p => p.name) - } - } - - private getRuntimeTargets(): OutputRuntimeTargets { - this.runtimeTargets ??= discoverOutputRuntimeTargets(this.logger) - return this.runtimeTargets - } -} diff --git a/sdk/src/bridge/node.rs b/sdk/src/bridge/node.rs index 4681804c..de33803a 100644 --- a/sdk/src/bridge/node.rs +++ b/sdk/src/bridge/node.rs @@ -66,14 +66,12 @@ where /// /// Search order: /// 1. `/plugin-runtime.mjs` (release archive: binary + JS co-located) -/// 2. `/../dist/plugin-runtime.mjs` (dev mode: sdk/dist/) -/// 3. `/../sdk/dist/plugin-runtime.mjs` (dev mode from repo root) -/// 4. `/../cli/dist/plugin-runtime.mjs` (published CLI shell asset copy) -/// 5. `/dist/plugin-runtime.mjs` (fallback) -/// 6. `/sdk/dist/plugin-runtime.mjs` (fallback from repo root cwd) -/// 7. `/cli/dist/plugin-runtime.mjs` (published CLI shell fallback from repo root cwd) -/// 8. npm/pnpm global install: `/@truenine/memory-sync-cli/dist/plugin-runtime.mjs` -/// 9. Embedded JS extracted to `~/.aindex/.cache/plugin-runtime-.mjs` +/// 2. `/../dist/plugin-runtime.mjs` (dev mode: cli/dist/) +/// 3. `/../cli/dist/plugin-runtime.mjs` (repo-root fallback) +/// 4. `/dist/plugin-runtime.mjs` (fallback) +/// 5. `/cli/dist/plugin-runtime.mjs` (repo-root fallback from cwd) +/// 6. npm/pnpm global install: `/@truenine/memory-sync-cli/dist/plugin-runtime.mjs` +/// 7. Embedded JS extracted to `~/.aindex/.cache/plugin-runtime-.mjs` pub(crate) fn find_plugin_runtime() -> Option { let cache = PLUGIN_RUNTIME_CACHE.get_or_init(|| Mutex::new(None)); detect_with_cached_success(cache, detect_plugin_runtime) @@ -88,14 +86,12 @@ fn detect_plugin_runtime() -> Option { { candidates.push(exe_dir.join("plugin-runtime.mjs")); candidates.push(exe_dir.join("../dist/plugin-runtime.mjs")); - candidates.push(exe_dir.join("../sdk/dist/plugin-runtime.mjs")); candidates.push(exe_dir.join("../cli/dist/plugin-runtime.mjs")); } // Relative to CWD if let Ok(cwd) = std::env::current_dir() { candidates.push(cwd.join("dist/plugin-runtime.mjs")); - candidates.push(cwd.join("sdk/dist/plugin-runtime.mjs")); candidates.push(cwd.join("cli/dist/plugin-runtime.mjs")); } diff --git a/sdk/src/commands/CleanCommand.ts b/sdk/src/commands/CleanCommand.ts deleted file mode 100644 index bb8be0a8..00000000 --- a/sdk/src/commands/CleanCommand.ts +++ /dev/null @@ -1,34 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {performCleanup} from './CleanupUtils' - -/** - * Clean command - deletes registered output files and directories - */ -export class CleanCommand implements Command { - readonly name = 'clean' - - async execute(ctx: CommandContext): Promise { - const {logger, outputPlugins, createCleanContext} = ctx - logger.info('running clean pipeline', {command: 'clean'}) - - const cleanCtx = createCleanContext(false) - const result = await performCleanup(outputPlugins, cleanCtx, logger) - - if (result.violations.length > 0 || result.conflicts.length > 0) { - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - ...result.message != null ? {message: result.message} : {} - } - } - - logger.info('clean complete', {deletedFiles: result.deletedFiles, deletedDirs: result.deletedDirs}) - - return { - success: true, - filesAffected: result.deletedFiles, - dirsAffected: result.deletedDirs - } - } -} diff --git a/sdk/src/commands/CleanupUtils.adapter.test.ts b/sdk/src/commands/CleanupUtils.adapter.test.ts deleted file mode 100644 index 069ea3ab..00000000 --- a/sdk/src/commands/CleanupUtils.adapter.test.ts +++ /dev/null @@ -1,156 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it, vi} from 'vitest' -import {FilePathKind, PluginKind} from '../plugins/plugin-core' - -const nativeBindingMocks = vi.hoisted(() => ({ - planCleanup: vi.fn<(snapshotJson: string) => string>(), - performCleanup: vi.fn<(snapshotJson: string) => string>() -})) - -vi.mock('../core/native-binding', () => ({ - getNativeBinding: () => ({ - ...globalThis.__TNMSC_TEST_NATIVE_BINDING__, - planCleanup: nativeBindingMocks.planCleanup, - performCleanup: nativeBindingMocks.performCleanup - }) -})) - -const cleanupModulePromise = import('./CleanupUtils') - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createCleanContext(workspaceDir: string): OutputCleanContext { - return { - logger: createMockLogger(), - fs, - path, - glob, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [ - { - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceDir, 'project-a') - } - } - ] - }, - aindexDir: path.join(workspaceDir, 'aindex') - } - } as OutputCleanContext -} - -function createMockOutputPlugin(): OutputPlugin { - return { - type: PluginKind.Output, - name: 'MockOutputPlugin', - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return [{path: path.join('/tmp', 'project-a', 'AGENTS.md'), source: {}}] - }, - async declareCleanupPaths(): Promise { - return { - delete: [{kind: 'glob', path: path.join('/tmp', '.codex', 'skills', '*'), excludeBasenames: ['.system']}] - } - }, - async convertContent() { - return 'test' - } - } -} - -describe('cleanupUtils native adapter', () => { - it('uses the native cleanup bridge when it is available', async () => { - nativeBindingMocks.planCleanup.mockReset() - nativeBindingMocks.performCleanup.mockReset() - - nativeBindingMocks.planCleanup.mockReturnValue( - JSON.stringify({ - filesToDelete: ['/tmp/project-a/AGENTS.md'], - dirsToDelete: ['/tmp/.codex/skills/legacy'], - emptyDirsToDelete: ['/tmp/.codex/skills'], - violations: [], - conflicts: [], - excludedScanGlobs: ['**/.git/**'] - }) - ) - nativeBindingMocks.performCleanup.mockReturnValue( - JSON.stringify({ - deletedFiles: 1, - deletedDirs: 2, - errors: [], - violations: [], - conflicts: [], - filesToDelete: ['/tmp/project-a/AGENTS.md'], - dirsToDelete: ['/tmp/.codex/skills/legacy'], - emptyDirsToDelete: ['/tmp/.codex/skills'], - excludedScanGlobs: ['**/.git/**'] - }) - ) - - const {collectDeletionTargets, hasNativeCleanupBinding, performCleanup} = await cleanupModulePromise - const workspaceDir = path.resolve('tmp-native-cleanup-adapter') - const cleanCtx = createCleanContext(workspaceDir) - const plugin = createMockOutputPlugin() - - expect(hasNativeCleanupBinding()).toBe(true) - - const plan = await collectDeletionTargets([plugin], cleanCtx) - expect(plan).toEqual({ - filesToDelete: ['/tmp/project-a/AGENTS.md'], - dirsToDelete: ['/tmp/.codex/skills/legacy'], - emptyDirsToDelete: ['/tmp/.codex/skills'], - violations: [], - conflicts: [], - excludedScanGlobs: ['**/.git/**'] - }) - expect(nativeBindingMocks.planCleanup).toHaveBeenCalledOnce() - - const planSnapshot = JSON.parse(String(nativeBindingMocks.planCleanup.mock.calls[0]?.[0])) as { - readonly pluginSnapshots: readonly {pluginName: string, outputs: readonly string[], cleanup: {delete?: readonly {kind: string}[]}}[] - } - expect(planSnapshot.pluginSnapshots).toEqual([ - expect.objectContaining({ - pluginName: 'MockOutputPlugin', - outputs: ['/tmp/project-a/AGENTS.md'], - cleanup: expect.objectContaining({ - delete: [expect.objectContaining({kind: 'glob'})] - }) - }) - ]) - - const result = await performCleanup([plugin], cleanCtx, createMockLogger()) - expect(result).toEqual({ - deletedFiles: 1, - deletedDirs: 3, - errors: [], - violations: [], - conflicts: [] - }) - expect(nativeBindingMocks.performCleanup).toHaveBeenCalledOnce() - }) -}) diff --git a/sdk/src/commands/CleanupUtils.test.ts b/sdk/src/commands/CleanupUtils.test.ts deleted file mode 100644 index 9d4f9f62..00000000 --- a/sdk/src/commands/CleanupUtils.test.ts +++ /dev/null @@ -1,782 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../plugins/plugin-core' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {FilePathKind, IDEKind, PluginKind} from '../plugins/plugin-core' -import {collectDeletionTargets, performCleanup} from './CleanupUtils' - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createRecordingLogger(): ILogger & {debugMessages: unknown[]} { - const debugMessages: unknown[] = [] - - return { - debugMessages, - trace: () => {}, - debug: message => { - debugMessages.push(message) - }, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger & {debugMessages: unknown[]} -} - -function createCleanContext( - overrides?: Partial, - pluginOptionsOverrides?: Parameters[0] -): OutputCleanContext { - const workspaceDir = path.resolve('tmp-cleanup-utils-workspace') - return { - logger: createMockLogger(), - fs, - path, - glob, - dryRun: true, - pluginOptions: mergeConfig(pluginOptionsOverrides ?? {}), - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - }, - ...overrides - } - } as OutputCleanContext -} - -function createMockOutputPlugin(name: string, outputs: readonly string[], cleanup?: OutputCleanupDeclarations): OutputPlugin { - return { - type: PluginKind.Output, - name, - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return outputs.map(output => ({path: output, source: {}})) - }, - async declareCleanupPaths() { - return cleanup ?? {} - }, - async convertContent() { - return '' - } - } -} - -describe('collectDeletionTargets', () => { - it('throws when an output path matches a protected input source file', async () => { - const editorSource = path.resolve('tmp-aindex/public/.editorconfig') - const ignoreSource = path.resolve('tmp-aindex/public/.cursorignore') - - const ctx = createCleanContext({ - editorConfigFiles: [ - { - type: IDEKind.EditorConfig, - content: 'root = true', - length: 11, - filePathKind: FilePathKind.Absolute, - dir: { - pathKind: FilePathKind.Absolute, - path: editorSource, - getDirectoryName: () => '.editorconfig' - } - } - ], - aiAgentIgnoreConfigFiles: [ - { - fileName: '.cursorignore', - content: 'node_modules', - sourcePath: ignoreSource - } - ] - }) - - const plugin = createMockOutputPlugin('MockOutputPlugin', [editorSource, ignoreSource]) - - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') - }) - - it('keeps non-overlapping output paths for cleanup', async () => { - const outputA = path.resolve('tmp-out/a.md') - const outputB = path.resolve('tmp-out/b.md') - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [outputA, outputB]) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(new Set(result.filesToDelete)).toEqual(new Set([outputA, outputB])) - expect(result.violations).toEqual([]) - }) - - it('throws when an output path matches a known aindex protected config file', async () => { - const aindexDir = path.resolve('tmp-aindex') - const editorConfigOutput = path.resolve(aindexDir, 'public', '.editorconfig') - const ctx = createCleanContext({aindexDir}) - const plugin = createMockOutputPlugin('MockOutputPlugin', [editorConfigOutput]) - - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') - }) - - it('compacts nested delete targets to reduce IO', async () => { - const claudeBaseDir = path.resolve('tmp-out/.claude') - const ruleDir = path.join(claudeBaseDir, 'rules') - const ruleFile = path.join(ruleDir, 'a.md') - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [ruleFile], { - delete: [ - {kind: 'directory', path: claudeBaseDir}, - {kind: 'directory', path: ruleDir}, - {kind: 'file', path: ruleFile} - ] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([claudeBaseDir]) - expect(result.filesToDelete).toEqual([]) - }) - - it('skips parent deletion when a protected child path exists', async () => { - const codexBaseDir = path.resolve('tmp-out/.codex') - const promptsDir = path.join(codexBaseDir, 'prompts') - const protectedSystemDir = path.join(codexBaseDir, 'skills', '.system') - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [ - {kind: 'directory', path: codexBaseDir}, - {kind: 'directory', path: promptsDir} - ], - protect: [{kind: 'directory', path: protectedSystemDir}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([promptsDir]) - expect(result.violations.map(violation => violation.targetPath)).toEqual([codexBaseDir]) - }) - - it('blocks deleting dangerous roots and returns the most specific matching rule', async () => { - const homeDir = os.homedir() - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [{kind: 'directory', path: homeDir}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([]) - expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([ - expect.objectContaining({ - targetPath: path.resolve(homeDir), - protectedPath: path.resolve('tmp-cleanup-utils-workspace', 'knowladge'), - protectionMode: 'direct' - }) - ]) - }) - - it('throws when an output path matches a built-in protected path before directory guards run', async () => { - const workspaceDir = path.resolve('tmp-workspace-root') - const projectRoot = path.join(workspaceDir, 'project-a') - const aindexDir = path.join(workspaceDir, 'aindex') - const globalAindexDir = path.join(os.homedir(), '.aindex') - const globalConfigPath = path.join(globalAindexDir, '.tnmsc.json') - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [ - { - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => projectRoot - } - } - ] - }, - aindexDir - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [globalConfigPath], { - delete: [ - {kind: 'directory', path: globalAindexDir}, - {kind: 'directory', path: workspaceDir}, - {kind: 'directory', path: projectRoot}, - {kind: 'directory', path: aindexDir} - ] - }) - - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow( - `Cleanup protection conflict: 1 output path(s) are also protected: ${path.resolve(globalConfigPath)}` - ) - }) - - it('allows deleting non-mdx files under dist while blocking reserved dist mdx files', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-dist-mdx-')) - const workspaceDir = path.join(tempDir, 'workspace') - const distCommandDir = path.join(workspaceDir, 'aindex', 'dist', 'commands') - const projectChildFile = path.join(workspaceDir, 'project-a', 'AGENTS.md') - const protectedDistMdxFile = path.join(distCommandDir, 'demo.mdx') - const safeDistMarkdownFile = path.join(distCommandDir, 'README.md') - const globalChildDir = path.join(os.homedir(), '.aindex', '.codex', 'prompts') - const aindexSourceDir = path.join(workspaceDir, 'aindex', 'commands') - - fs.mkdirSync(path.dirname(projectChildFile), {recursive: true}) - fs.mkdirSync(distCommandDir, {recursive: true}) - fs.mkdirSync(aindexSourceDir, {recursive: true}) - fs.writeFileSync(projectChildFile, '# agent', 'utf8') - fs.writeFileSync(protectedDistMdxFile, '# compiled', 'utf8') - fs.writeFileSync(safeDistMarkdownFile, '# doc', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [ - { - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceDir, 'project-a') - } - } - ] - }, - aindexDir: path.join(workspaceDir, 'aindex') - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [projectChildFile, safeDistMarkdownFile], { - delete: [ - {kind: 'file', path: protectedDistMdxFile}, - {kind: 'directory', path: globalChildDir}, - {kind: 'directory', path: aindexSourceDir} - ] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(new Set(result.filesToDelete)).toEqual(new Set([path.resolve(projectChildFile), path.resolve(safeDistMarkdownFile)])) - const allDirsToDelete = [...result.dirsToDelete, ...result.emptyDirsToDelete] - expect(new Set(allDirsToDelete)).toEqual(new Set([path.resolve(globalChildDir), path.resolve(aindexSourceDir), path.resolve(workspaceDir, 'project-a')])) - expect(result.violations).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - targetPath: path.resolve(protectedDistMdxFile), - protectionMode: 'direct', - protectedPath: path.resolve(protectedDistMdxFile) - }), - expect.objectContaining({targetPath: path.resolve(aindexSourceDir)}) - ]) - ) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('blocks deleting a dist directory when protected mdx descendants exist', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-dist-dir-')) - const workspaceDir = path.join(tempDir, 'workspace') - const distCommandDir = path.join(workspaceDir, 'aindex', 'dist', 'commands') - const protectedDistMdxFile = path.join(distCommandDir, 'demo.mdx') - - fs.mkdirSync(distCommandDir, {recursive: true}) - fs.writeFileSync(protectedDistMdxFile, '# compiled', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - }, - aindexDir: path.join(workspaceDir, 'aindex') - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [{kind: 'directory', path: distCommandDir}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([]) - expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([ - expect.objectContaining({ - targetPath: path.resolve(distCommandDir), - protectionMode: 'direct', - protectedPath: path.resolve(protectedDistMdxFile) - }) - ]) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('allows deleting non-mdx files under app while blocking reserved app mdx files', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-app-mdx-')) - const workspaceDir = path.join(tempDir, 'workspace') - const appDir = path.join(workspaceDir, 'aindex', 'app') - const protectedAppMdxFile = path.join(appDir, 'guide.mdx') - const safeAppMarkdownFile = path.join(appDir, 'README.md') - - fs.mkdirSync(appDir, {recursive: true}) - fs.writeFileSync(protectedAppMdxFile, '# app guide', 'utf8') - fs.writeFileSync(safeAppMarkdownFile, '# readme', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - }, - aindexDir: path.join(workspaceDir, 'aindex') - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [safeAppMarkdownFile], { - delete: [{kind: 'file', path: protectedAppMdxFile}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - targetPath: path.resolve(protectedAppMdxFile), - protectionMode: 'direct', - protectedPath: path.resolve(protectedAppMdxFile) - }), - expect.objectContaining({targetPath: path.resolve(safeAppMarkdownFile)}) - ]) - ) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('throws when an output file path exactly matches a cleanup protect declaration', async () => { - const outputPath = path.resolve('tmp-out/protected.md') - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [outputPath], { - protect: [{kind: 'file', path: outputPath}] - }) - - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') - }) - - it('blocks deleting an app directory when protected mdx descendants exist', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-app-dir-')) - const workspaceDir = path.join(tempDir, 'workspace') - const appSubDir = path.join(workspaceDir, 'aindex', 'app', 'nested') - const protectedAppMdxFile = path.join(appSubDir, 'guide.mdx') - - fs.mkdirSync(appSubDir, {recursive: true}) - fs.writeFileSync(protectedAppMdxFile, '# app guide', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - }, - aindexDir: path.join(workspaceDir, 'aindex') - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [{kind: 'directory', path: path.join(workspaceDir, 'aindex', 'app')}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([]) - expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([ - expect.objectContaining({ - targetPath: path.resolve(path.join(workspaceDir, 'aindex', 'app')), - protectionMode: 'direct', - protectedPath: path.resolve(protectedAppMdxFile) - }) - ]) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('blocks symlink targets that resolve to a protected path and keeps the most specific match', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-guard-')) - const workspaceDir = path.join(tempDir, 'workspace') - const symlinkPath = path.join(tempDir, 'workspace-link') - - fs.mkdirSync(workspaceDir, {recursive: true}) - - try { - const symlinkType: 'junction' | 'dir' = process.platform === 'win32' ? 'junction' : 'dir' - fs.symlinkSync(workspaceDir, symlinkPath, symlinkType) - - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [{kind: 'directory', path: symlinkPath}] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([]) - expect(result.violations).toEqual([ - expect.objectContaining({ - targetPath: path.resolve(symlinkPath), - protectedPath: path.resolve(path.join(workspaceDir, 'knowladge')), - protectionMode: 'direct' - }) - ]) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('lets direct protect declarations keep descendants deletable while recursive protect declarations block them', async () => { - const workspaceDir = path.resolve('tmp-direct-vs-recursive') - const directProtectedDir = path.join(workspaceDir, 'project-a') - const recursiveProtectedDir = path.join(workspaceDir, 'aindex', 'dist') - const directChildFile = path.join(directProtectedDir, 'AGENTS.md') - const recursiveChildFile = path.join(recursiveProtectedDir, 'commands', 'demo.mdx') - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [directChildFile, recursiveChildFile], { - protect: [ - {kind: 'directory', path: directProtectedDir, protectionMode: 'direct'}, - {kind: 'directory', path: recursiveProtectedDir, protectionMode: 'recursive'} - ] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.filesToDelete).toEqual([path.resolve(directChildFile)]) - expect(result.violations).toEqual([ - expect.objectContaining({ - targetPath: path.resolve(recursiveChildFile), - protectionMode: 'recursive', - protectedPath: path.resolve(recursiveProtectedDir) - }) - ]) - }) - - it('skips delete glob matches covered by excludeScanGlobs while still deleting other sibling directories', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-exclude-glob-')) - const skillsDir = path.join(tempDir, '.cursor', 'skills-cursor') - const preservedDir = path.join(skillsDir, 'create-rule') - const staleDir = path.join(skillsDir, 'legacy-skill') - - fs.mkdirSync(preservedDir, {recursive: true}) - fs.mkdirSync(staleDir, {recursive: true}) - fs.writeFileSync(path.join(preservedDir, 'SKILL.md'), '# preserved', 'utf8') - fs.writeFileSync(path.join(staleDir, 'SKILL.md'), '# stale', 'utf8') - - try { - const ctx = createCleanContext() - const plugin = createMockOutputPlugin('MockOutputPlugin', [], { - delete: [{kind: 'glob', path: path.join(skillsDir, '*')}], - protect: [{kind: 'directory', path: preservedDir}], - excludeScanGlobs: [preservedDir, path.join(preservedDir, '**')] - }) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.dirsToDelete).toEqual([path.resolve(staleDir)]) - expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([]) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('throws when an output path matches the configured workspace prompt source file', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-workspace-src-')) - const workspaceDir = path.join(tempDir, 'workspace') - const aindexDir = path.join(workspaceDir, 'aindex-meta') - const workspacePromptSource = path.join(aindexDir, 'meta', 'workspace.src.mdx') - - fs.mkdirSync(path.dirname(workspacePromptSource), {recursive: true}) - fs.writeFileSync(workspacePromptSource, '# workspace', 'utf8') - - try { - const ctx = createCleanContext( - { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - }, - aindexDir - }, - { - workspaceDir, - aindex: { - dir: 'aindex-meta', - workspacePrompt: { - src: 'meta/workspace.src.mdx', - dist: 'compiled/workspace.mdx' - } - } - } as Parameters[0] - ) - const plugin = createMockOutputPlugin('MockOutputPlugin', [workspacePromptSource]) - - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('plans workspace empty directories while skipping excluded trees and symlink entries', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-empty-sweep-')) - const workspaceDir = path.join(tempDir, 'workspace') - const sourceLeafDir = path.join(workspaceDir, 'source', 'empty', 'leaf') - const sourceKeepFile = path.join(workspaceDir, 'source', 'keep.md') - const distEmptyDir = path.join(workspaceDir, 'dist', 'ghost') - const nodeModulesEmptyDir = path.join(workspaceDir, 'node_modules', 'pkg', 'ghost') - const gitEmptyDir = path.join(workspaceDir, '.git', 'objects', 'info') - const symlinkTarget = path.join(tempDir, 'symlink-target') - const symlinkParentDir = path.join(workspaceDir, 'symlink-parent') - const symlinkPath = path.join(symlinkParentDir, 'linked') - - fs.mkdirSync(sourceLeafDir, {recursive: true}) - fs.mkdirSync(path.dirname(sourceKeepFile), {recursive: true}) - fs.mkdirSync(distEmptyDir, {recursive: true}) - fs.mkdirSync(nodeModulesEmptyDir, {recursive: true}) - fs.mkdirSync(gitEmptyDir, {recursive: true}) - fs.mkdirSync(symlinkTarget, {recursive: true}) - fs.mkdirSync(symlinkParentDir, {recursive: true}) - fs.writeFileSync(sourceKeepFile, '# keep', 'utf8') - - try { - const symlinkType: 'junction' | 'dir' = process.platform === 'win32' ? 'junction' : 'dir' - fs.symlinkSync(symlinkTarget, symlinkPath, symlinkType) - - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', []) - - const result = await collectDeletionTargets([plugin], ctx) - - expect(result.filesToDelete).toEqual([]) - expect(result.dirsToDelete).toEqual([]) - expect(result.emptyDirsToDelete).toEqual([path.resolve(workspaceDir, 'source', 'empty'), path.resolve(sourceLeafDir)]) - expect(result.emptyDirsToDelete).not.toContain(path.resolve(workspaceDir)) - expect(result.emptyDirsToDelete).not.toContain(path.resolve(distEmptyDir)) - expect(result.emptyDirsToDelete).not.toContain(path.resolve(nodeModulesEmptyDir)) - expect(result.emptyDirsToDelete).not.toContain(path.resolve(gitEmptyDir)) - expect(result.emptyDirsToDelete).not.toContain(path.resolve(symlinkParentDir)) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) -}) - -describe('performCleanup', () => { - it('deletes files and directories in one cleanup pass', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-perform-cleanup-')) - const outputFile = path.join(tempDir, 'project-a', 'AGENTS.md') - const outputDir = path.join(tempDir, '.codex', 'prompts') - const stalePrompt = path.join(outputDir, 'demo.md') - - fs.mkdirSync(path.dirname(outputFile), {recursive: true}) - fs.mkdirSync(outputDir, {recursive: true}) - fs.writeFileSync(outputFile, '# agent', 'utf8') - fs.writeFileSync(stalePrompt, '# prompt', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: tempDir, - getDirectoryName: () => path.basename(tempDir), - getAbsolutePath: () => tempDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [outputFile], { - delete: [{kind: 'directory', path: outputDir}] - }) - - const result = await performCleanup([plugin], ctx, createMockLogger()) - - expect(result).toEqual( - expect.objectContaining({ - deletedFiles: 1, - deletedDirs: 3, - errors: [], - violations: [], - conflicts: [] - }) - ) - expect(fs.existsSync(outputFile)).toBe(false) - expect(fs.existsSync(outputDir)).toBe(false) - expect(fs.existsSync(path.dirname(outputFile))).toBe(false) - expect(fs.existsSync(path.dirname(outputDir))).toBe(false) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('logs aggregated cleanup execution summaries instead of per-path success logs', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-perform-cleanup-logging-')) - const outputFile = path.join(tempDir, 'project-a', 'AGENTS.md') - const outputDir = path.join(tempDir, '.codex', 'prompts') - const stalePrompt = path.join(outputDir, 'demo.md') - const logger = createRecordingLogger() - - fs.mkdirSync(path.dirname(outputFile), {recursive: true}) - fs.mkdirSync(outputDir, {recursive: true}) - fs.writeFileSync(outputFile, '# agent', 'utf8') - fs.writeFileSync(stalePrompt, '# prompt', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: tempDir, - getDirectoryName: () => path.basename(tempDir), - getAbsolutePath: () => tempDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [outputFile], { - delete: [{kind: 'directory', path: outputDir}] - }) - - await performCleanup([plugin], ctx, logger) - - expect(logger.debugMessages).toEqual( - expect.arrayContaining(['cleanup plan built', 'cleanup delete execution started', 'cleanup delete execution complete']) - ) - expect(logger.debugMessages).not.toContainEqual(expect.objectContaining({path: outputFile})) - expect(logger.debugMessages).not.toContainEqual(expect.objectContaining({path: outputDir})) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) - - it('deletes generated files and then prunes workspace empty directories', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-perform-cleanup-empty-sweep-')) - const outputFile = path.join(tempDir, 'generated', 'AGENTS.md') - const emptyLeafDir = path.join(tempDir, 'scratch', 'empty', 'leaf') - const retainedScratchFile = path.join(tempDir, 'scratch', 'keep.md') - - fs.mkdirSync(path.dirname(outputFile), {recursive: true}) - fs.mkdirSync(emptyLeafDir, {recursive: true}) - fs.mkdirSync(path.dirname(retainedScratchFile), {recursive: true}) - fs.writeFileSync(outputFile, '# agent', 'utf8') - fs.writeFileSync(retainedScratchFile, '# keep', 'utf8') - - try { - const ctx = createCleanContext({ - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: tempDir, - getDirectoryName: () => path.basename(tempDir), - getAbsolutePath: () => tempDir - }, - projects: [] - } - }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [outputFile]) - - const result = await performCleanup([plugin], ctx, createMockLogger()) - - expect(result).toEqual( - expect.objectContaining({ - deletedFiles: 1, - deletedDirs: 3, - errors: [], - violations: [], - conflicts: [] - }) - ) - expect(fs.existsSync(outputFile)).toBe(false) - expect(fs.existsSync(path.dirname(outputFile))).toBe(false) - expect(fs.existsSync(path.join(tempDir, 'scratch', 'empty', 'leaf'))).toBe(false) - expect(fs.existsSync(path.join(tempDir, 'scratch', 'empty'))).toBe(false) - expect(fs.existsSync(path.join(tempDir, 'scratch'))).toBe(true) - } finally { - fs.rmSync(tempDir, {recursive: true, force: true}) - } - }) -}) diff --git a/sdk/src/commands/CommandFactory.ts b/sdk/src/commands/CommandFactory.ts deleted file mode 100644 index 3604485f..00000000 --- a/sdk/src/commands/CommandFactory.ts +++ /dev/null @@ -1,29 +0,0 @@ -import type {Command} from './Command' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' - -/** - * Command factory interface - * Each factory knows how to create a specific command based on CLI args - */ -export interface CommandFactory { - canHandle: (args: ParsedCliArgs) => boolean - - createCommand: (args: ParsedCliArgs) => Command -} - -/** - * Priority levels for command factory resolution - * Lower number = higher priority - */ -export enum FactoryPriority { - Flags = 0, // --version, --help flags (highest priority) - Unknown = 1, // Unknown command handling - Subcommand = 2 // Named subcommands -} - -/** - * Extended factory interface with priority - */ -export interface PrioritizedCommandFactory extends CommandFactory { - readonly priority: FactoryPriority -} diff --git a/sdk/src/commands/ConfigCommand.ts b/sdk/src/commands/ConfigCommand.ts deleted file mode 100644 index 68b10277..00000000 --- a/sdk/src/commands/ConfigCommand.ts +++ /dev/null @@ -1,237 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import type {AindexConfigKeyPath} from '@/plugins/plugin-core' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' -import {AINDEX_CONFIG_KEY_PATHS} from '@/plugins/plugin-core' -import {getRequiredGlobalConfigPath} from '@/runtime-environment' - -/** - * Valid configuration keys that can be set via `tnmsc config key=value`. - * Nested keys use dot-notation: aindex.skills.src, aindex.commands.src, etc. - */ -type ValidConfigKey = 'workspaceDir' | 'logLevel' | AindexConfigKeyPath - -const VALID_CONFIG_KEYS: readonly ValidConfigKey[] = [ - 'workspaceDir', - ...AINDEX_CONFIG_KEY_PATHS, - 'logLevel' -] - -/** - * Validate if a key is a valid config key - */ -function isValidConfigKey(key: string): key is ValidConfigKey { - return VALID_CONFIG_KEYS.includes(key as ValidConfigKey) -} - -/** - * Validate log level value - */ -function isValidLogLevel(value: string): boolean { - const validLevels = ['trace', 'debug', 'info', 'warn', 'error'] - return validLevels.includes(value) -} - -/** - * Get global config file path - */ -function getGlobalConfigPath(): string { - return getRequiredGlobalConfigPath() -} - -/** - * Read global config file - */ -function readGlobalConfig(): ConfigObject { - const configPath = getGlobalConfigPath() - if (!fs.existsSync(configPath)) return {} - try { - const content = fs.readFileSync(configPath, 'utf8') - return JSON.parse(content) as ConfigObject - } - catch { - return {} - } -} - -/** - * Write global config file - */ -function writeGlobalConfig(config: ConfigObject): void { - const configPath = getGlobalConfigPath() - const configDir = path.dirname(configPath) - - if (!fs.existsSync(configDir)) fs.mkdirSync(configDir, {recursive: true}) // Ensure directory exists - - fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, 'utf8') // Write with pretty formatting -} - -type ConfigValue = string | ConfigObject -interface ConfigObject { - [key: string]: ConfigValue | undefined -} - -/** - * Set a nested value in an object using dot-notation key - */ -function setNestedValue(obj: ConfigObject, key: string, value: string): void { - const parts = key.split('.') - let current: ConfigObject = obj - for (let i = 0; i < parts.length - 1; i++) { - const part = parts[i] - if (part == null) continue - const next = current[part] - if (typeof next !== 'object' || next === null || Array.isArray(next)) current[part] = {} - current = current[part] as ConfigObject - } - - const lastPart = parts.at(-1) - if (lastPart == null) return - current[lastPart] = value -} - -/** - * Get a nested value from an object using dot-notation key - */ -function getNestedValue(obj: ConfigObject, key: string): ConfigValue | undefined { - const parts = key.split('.') - let current: ConfigValue | undefined = obj - for (const part of parts) { - if (typeof current !== 'object' || current === null || Array.isArray(current)) return void 0 - current = current[part] - } - return current -} - -export class ConfigCommand implements Command { - readonly name = 'config' - - constructor( - private readonly options: readonly [key: string, value: string][] - ) { } - - async execute(ctx: CommandContext): Promise { - const {logger} = ctx - - if (this.options.length === 0) { - logger.error(buildUsageDiagnostic({ - code: 'CONFIG_COMMAND_ARGUMENTS_MISSING', - title: 'Config command requires at least one key=value pair', - rootCause: diagnosticLines( - 'tnmsc config was invoked without any configuration assignments.' - ), - exactFix: diagnosticLines( - 'Run `tnmsc config key=value` with at least one supported configuration key.' - ), - possibleFixes: [ - diagnosticLines(`Use one of the supported keys: ${VALID_CONFIG_KEYS.join(', ')}`) - ], - details: { - validKeys: [...VALID_CONFIG_KEYS] - } - })) - logger.info('Usage: tnmsc config key=value') - logger.info(`Valid keys: ${VALID_CONFIG_KEYS.join(', ')}`) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: 'No options provided' - } - } - - let config: ConfigObject - - try { - config = readGlobalConfig() - } - catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: errorMessage - } - } - - const errors: string[] = [] - const updated: string[] = [] - - for (const [key, value] of this.options) { // Process each key-value pair - if (!isValidConfigKey(key)) { - errors.push(`Invalid key: ${key}`) - logger.error(buildUsageDiagnostic({ - code: 'CONFIG_COMMAND_KEY_INVALID', - title: `Unsupported config key: ${key}`, - rootCause: diagnosticLines( - `The config command received "${key}", which is not a supported configuration key.` - ), - exactFix: diagnosticLines('Use one of the supported config keys and rerun the command.'), - possibleFixes: [ - diagnosticLines(`Supported keys: ${VALID_CONFIG_KEYS.join(', ')}`) - ], - details: { - key, - validKeys: [...VALID_CONFIG_KEYS] - } - })) - continue - } - - if (key === 'logLevel' && !isValidLogLevel(value)) { // Special validation for logLevel - errors.push(`Invalid logLevel value: ${value}`) - logger.error(buildUsageDiagnostic({ - code: 'CONFIG_COMMAND_LOG_LEVEL_INVALID', - title: `Unsupported logLevel value: ${value}`, - rootCause: diagnosticLines( - `The config command received "${value}" for logLevel, but tnmsc does not support that level.` - ), - exactFix: diagnosticLines('Set logLevel to one of: trace, debug, info, warn, or error.'), - details: { - key, - value, - validLevels: ['trace', 'debug', 'info', 'warn', 'error'] - } - })) - continue - } - - const oldValue = getNestedValue(config, key) // Update config - setNestedValue(config, key, value) - - if (oldValue !== value) updated.push(`${key}=${value}`) - - logger.info('configuration updated', {key, value}) - } - - if (updated.length > 0) { // Write config if there are valid updates - try { - writeGlobalConfig(config) - } - catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: errorMessage - } - } - logger.info('global config written', {path: getGlobalConfigPath()}) - } - - const success = errors.length === 0 - const message = success - ? `Configuration updated: ${updated.join(', ')}` - : `Partial update: ${updated.join(', ')}. Errors: ${errors.join(', ')}` - - return { - success, - filesAffected: updated.length > 0 ? 1 : 0, - dirsAffected: 0, - message - } - } -} diff --git a/sdk/src/commands/ConfigShowCommand.ts b/sdk/src/commands/ConfigShowCommand.ts deleted file mode 100644 index 2a21822a..00000000 --- a/sdk/src/commands/ConfigShowCommand.ts +++ /dev/null @@ -1,48 +0,0 @@ -import type {Command, CommandContext, CommandResult, ConfigSource, JsonConfigInfo} from './Command' -import process from 'node:process' -import {ConfigLoader} from '@/ConfigLoader' - -/** - * Command that outputs the current merged configuration and its source layers as JSON. - * - * Invoked via `tnmsc config --show --json`. - * Writes a `JsonConfigInfo` object to stdout containing: - * - `merged`: the final merged UserConfigFile - * - `sources`: an array of ConfigSource entries describing each layer - * - * When used without `--json`, logs the config info via the logger. - */ -export class ConfigShowCommand implements Command { - readonly name = 'config-show' - - async execute(ctx: CommandContext): Promise { - const {logger} = ctx - const loader = new ConfigLoader() - const mergedResult = loader.load() - - const sources: ConfigSource[] = mergedResult.sources.map(sourcePath => { - const loaded = loader.loadFromFile(sourcePath) - return { - path: sourcePath, - layer: 'global', - config: loaded.config - } - }) - - const configInfo: JsonConfigInfo = { - merged: mergedResult.config, - sources - } - - process.stdout.write(`${JSON.stringify(configInfo)}\n`) - - logger.info('config shown', {sources: mergedResult.sources.length}) - - return { - success: true, - filesAffected: 0, - dirsAffected: 0, - message: `Configuration displayed (${sources.length} source(s))` - } - } -} diff --git a/sdk/src/commands/InitCommand.test.ts b/sdk/src/commands/InitCommand.test.ts deleted file mode 100644 index 3224c8f6..00000000 --- a/sdk/src/commands/InitCommand.test.ts +++ /dev/null @@ -1,78 +0,0 @@ -import type {CommandContext} from './Command' -import * as fs from 'node:fs' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger, FilePathKind} from '../plugins/plugin-core' -import {InitCommand} from './InitCommand' - -function createCommandContext(): CommandContext { - const workspaceDir = path.resolve('tmp-init-command') - const userConfigOptions = mergeConfig({workspaceDir}) - - return { - logger: createLogger('InitCommandTest', 'error'), - outputPlugins: [], - userConfigOptions, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - }, - createCleanContext: dryRun => ({ - logger: createLogger('InitCommandTest', 'error'), - fs, - path, - glob, - dryRun, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - } - }) as CommandContext['createCleanContext'] extends (dryRun: boolean) => infer T ? T : never, - createWriteContext: dryRun => ({ - logger: createLogger('InitCommandTest', 'error'), - fs, - path, - glob, - dryRun, - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [] - } - } - }) as CommandContext['createWriteContext'] extends (dryRun: boolean) => infer T ? T : never - } -} - -describe('init command', () => { - it('returns a deprecation failure without creating files', async () => { - const result = await new InitCommand().execute(createCommandContext()) - - expect(result.success).toBe(false) - expect(result.filesAffected).toBe(0) - expect(result.dirsAffected).toBe(0) - expect(result.message).toContain('deprecated') - expect(result.message).toContain('~/workspace/aindex/public/') - }) -}) diff --git a/sdk/src/commands/InitCommand.ts b/sdk/src/commands/InitCommand.ts deleted file mode 100644 index 98180fcc..00000000 --- a/sdk/src/commands/InitCommand.ts +++ /dev/null @@ -1,36 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' - -const INIT_DEPRECATION_MESSAGE = '`tnmsc init` is deprecated and no longer initializes aindex. Maintain the public target-relative definitions manually under `~/workspace/aindex/public/`.' - -export class InitCommand implements Command { - readonly name = 'init' - - async execute(ctx: CommandContext): Promise { - const {logger} = ctx - - logger.warn(buildUsageDiagnostic({ - code: 'INIT_COMMAND_DEPRECATED', - title: 'The init command is deprecated', - rootCause: diagnosticLines( - '`tnmsc init` no longer initializes aindex content or project definitions.' - ), - exactFix: diagnosticLines( - 'Maintain the target-relative definitions manually under `~/workspace/aindex/public/`.' - ), - possibleFixes: [ - diagnosticLines('Run `tnmsc help` to find a supported replacement command for your workflow.') - ], - details: { - command: 'init' - } - })) - - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: INIT_DEPRECATION_MESSAGE - } - } -} diff --git a/sdk/src/commands/JsonOutputCommand.ts b/sdk/src/commands/JsonOutputCommand.ts deleted file mode 100644 index 3123e96c..00000000 --- a/sdk/src/commands/JsonOutputCommand.ts +++ /dev/null @@ -1,56 +0,0 @@ -import type {Command, CommandContext, CommandResult, JsonCommandResult} from './Command' -import process from 'node:process' -import {partitionBufferedDiagnostics} from '@/diagnostics' -import {clearBufferedDiagnostics, drainBufferedDiagnostics} from '@/plugins/plugin-core' - -/** - * Decorator command that wraps any Command to produce JSON output on stdout. - * - * When the `--json` flag is detected, this wrapper: - * 1. Suppresses all Winston console logging (sets global log level to 'silent') - * 2. Delegates execution to the inner command - * 3. Converts the CommandResult to a JsonCommandResult - * 4. Writes the JSON string to stdout - * - * This ensures clean, parseable JSON output for consumption by - * Tauri sidecar or other external tooling. - */ -export class JsonOutputCommand implements Command { - readonly name: string - private readonly inner: Command - - constructor(inner: Command) { - this.inner = inner - this.name = `json:${inner.name}` - } - - async execute(ctx: CommandContext): Promise { - clearBufferedDiagnostics() - const result = await this.inner.execute(ctx) - const jsonResult = toJsonCommandResult(result, drainBufferedDiagnostics()) - process.stdout.write(`${JSON.stringify(jsonResult)}\n`) - return result - } -} - -/** - * Convert a CommandResult to a JsonCommandResult. - * Maps the base result fields and initialises optional arrays as empty - * when not present, ensuring a consistent JSON shape. - */ -export function toJsonCommandResult( - result: CommandResult, - diagnostics = drainBufferedDiagnostics() -): JsonCommandResult { - const {warnings, errors} = partitionBufferedDiagnostics(diagnostics) - const json: JsonCommandResult = { - success: result.success, - filesAffected: result.filesAffected, - dirsAffected: result.dirsAffected, - ...result.message != null && {message: result.message}, - pluginResults: [], - warnings, - errors - } - return json -} diff --git a/sdk/src/commands/PluginsCommand.ts b/sdk/src/commands/PluginsCommand.ts deleted file mode 100644 index 8f284a06..00000000 --- a/sdk/src/commands/PluginsCommand.ts +++ /dev/null @@ -1,54 +0,0 @@ -import type {Command, CommandContext, CommandResult, JsonPluginInfo} from './Command' -import process from 'node:process' - -/** - * Command that outputs all registered output plugin information as JSON. - * - * Invoked via `tnmsc plugins --json`. - * Writes a `JsonPluginInfo[]` array to stdout containing each output plugin's - * name, description, and dependency list. - * - * When used without `--json`, logs the plugin list via the logger. - */ -export class PluginsCommand implements Command { - readonly name = 'plugins' - - async execute(ctx: CommandContext): Promise { - const {logger, outputPlugins, userConfigOptions} = ctx - - const allPlugins = userConfigOptions.plugins - const pluginInfos: JsonPluginInfo[] = [] - - for (const plugin of allPlugins) { - pluginInfos.push({ - name: plugin.name, - kind: 'Output', - description: plugin.name, - dependencies: [...plugin.dependsOn ?? []] - }) - } - - const registeredNames = new Set(pluginInfos.map(p => p.name)) // (they are registered separately via registerOutputPlugins) // Also include output plugins that may not be in userConfigOptions.plugins - for (const plugin of outputPlugins) { - if (!registeredNames.has(plugin.name)) { - pluginInfos.push({ - name: plugin.name, - kind: 'Output', - description: plugin.name, - dependencies: [...plugin.dependsOn ?? []] - }) - } - } - - process.stdout.write(`${JSON.stringify(pluginInfos)}\n`) - - logger.info('plugins listed', {count: pluginInfos.length}) - - return { - success: true, - filesAffected: 0, - dirsAffected: 0, - message: `Listed ${pluginInfos.length} plugin(s)` - } - } -} diff --git a/sdk/src/commands/ProtectedDeletionCommands.test.ts b/sdk/src/commands/ProtectedDeletionCommands.test.ts deleted file mode 100644 index 3b431b65..00000000 --- a/sdk/src/commands/ProtectedDeletionCommands.test.ts +++ /dev/null @@ -1,277 +0,0 @@ -import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin, OutputWriteContext} from '../plugins/plugin-core' -import type {CommandContext} from './Command' -import * as fs from 'node:fs' -import * as path from 'node:path' -import glob from 'fast-glob' -import {describe, expect, it, vi} from 'vitest' -import {mergeConfig} from '../config' -import {createLogger, FilePathKind, PluginKind} from '../plugins/plugin-core' -import {CleanCommand} from './CleanCommand' -import {DryRunCleanCommand} from './DryRunCleanCommand' -import {ExecuteCommand} from './ExecuteCommand' -import {JsonOutputCommand} from './JsonOutputCommand' - -function createMockLogger(): ILogger { - return { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} - } as ILogger -} - -function createMockOutputPlugin( - cleanup?: OutputCleanupDeclarations, - convertContent?: OutputPlugin['convertContent'] -): OutputPlugin { - return { - type: PluginKind.Output, - name: 'MockOutputPlugin', - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return [{path: path.join(path.resolve('tmp-workspace-command'), 'project-a', 'AGENTS.md'), source: {}}] - }, - async declareCleanupPaths() { - return cleanup ?? {} - }, - async convertContent(declaration, ctx) { - if (convertContent != null) return convertContent(declaration, ctx) - return 'test' - } - } -} - -function createCommandContext( - outputPlugins: readonly OutputPlugin[], - workspaceDir: string = path.resolve('tmp-workspace-command') -): CommandContext { - const aindexDir = path.join(workspaceDir, 'aindex') - const userConfigOptions = mergeConfig({workspaceDir}) - const collectedOutputContext = { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir), - getAbsolutePath: () => workspaceDir - }, - projects: [{ - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceDir, 'project-a') - } - }] - }, - aindexDir - } - - return { - logger: createMockLogger(), - outputPlugins, - collectedOutputContext, - userConfigOptions, - createCleanContext: (dryRun: boolean): OutputCleanContext => ({ - logger: createMockLogger(), - fs, - path, - glob, - collectedOutputContext, - pluginOptions: userConfigOptions, - dryRun - }), - createWriteContext: (dryRun: boolean): OutputWriteContext => ({ - logger: createMockLogger(), - fs, - path, - glob, - collectedOutputContext, - dryRun, - registeredPluginNames: outputPlugins.map(plugin => plugin.name) - }) - } -} - -describe('protected deletion commands', () => { - it('returns failure for clean and dry-run-clean when cleanup hits a protected path', async () => { - const workspaceDir = path.resolve('tmp-workspace-command') - const plugin = createMockOutputPlugin({ - delete: [{kind: 'directory', path: workspaceDir}] - }) - const ctx = createCommandContext([plugin]) - - await expect(new CleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ - success: false, - message: expect.stringContaining('Protected deletion guard blocked cleanup') - })) - await expect(new DryRunCleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ - success: false, - message: expect.stringContaining('Protected deletion guard blocked cleanup') - })) - }) - - it('returns failure before writes run when execute pre-cleanup hits a protected path', async () => { - const workspaceDir = path.resolve('tmp-workspace-command') - const convertContent = vi.fn(async () => 'should-not-write') - const plugin = createMockOutputPlugin({ - delete: [{kind: 'directory', path: workspaceDir}] - }, convertContent) - const ctx = createCommandContext([plugin]) - - await expect(new ExecuteCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ - success: false, - message: expect.stringContaining('Protected deletion guard blocked cleanup') - })) - expect(convertContent).not.toHaveBeenCalled() - }) - - it('returns failure when an output path conflicts with a cleanup protect declaration', async () => { - const outputPath = path.join(path.resolve('tmp-workspace-command'), 'project-a', 'AGENTS.md') - const plugin = createMockOutputPlugin({ - protect: [{kind: 'file', path: outputPath}] - }) - const ctx = createCommandContext([plugin]) - - await expect(new CleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ - success: false, - message: expect.stringContaining('Cleanup protection conflict') - })) - }) - - it('reuses declared outputs across cleanup and write during execute', async () => { - const workspaceDir = path.resolve('tmp-workspace-command-cached') - const outputPath = path.join(workspaceDir, 'project-a', 'AGENTS.md') - let declareOutputFilesCalls = 0 - const plugin: OutputPlugin = { - type: PluginKind.Output, - name: 'CachedOutputPlugin', - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - declareOutputFilesCalls += 1 - return [{path: outputPath, source: {}}] - }, - async declareCleanupPaths() { - return {} - }, - async convertContent() { - return 'cached-output' - } - } - - fs.rmSync(workspaceDir, {recursive: true, force: true}) - fs.mkdirSync(path.join(workspaceDir, 'project-a'), {recursive: true}) - - try { - const ctx = createCommandContext([plugin], workspaceDir) - const result = await new ExecuteCommand().execute(ctx) - - expect(result.success).toBe(true) - expect(declareOutputFilesCalls).toBe(1) - expect(fs.readFileSync(outputPath, 'utf8')).toBe('cached-output') - } - finally { - fs.rmSync(workspaceDir, {recursive: true, force: true}) - } - }) - - it('includes structured diagnostics in JSON output errors', async () => { - const writeSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) - const command = new JsonOutputCommand({ - name: 'mock', - async execute(ctx) { - ctx.logger.error({ - code: 'MOCK_FAILURE', - title: 'Mock command failed', - rootCause: ['The mock command was forced to fail for JSON output testing.'], - exactFix: ['Update the mock command inputs so it no longer emits the test failure.'] - }) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: 'blocked' - } - } - }) - - try { - await command.execute({ - ...createCommandContext([]), - logger: createLogger('ProtectedDeletionJsonTest', 'silent') - }) - expect(writeSpy).toHaveBeenCalledOnce() - const payload = JSON.parse(String(writeSpy.mock.calls[0]?.[0])) as { - readonly message?: string - readonly warnings: readonly unknown[] - readonly errors: readonly {code: string, title: string, rootCause: readonly string[], copyText: readonly string[]}[] - } - - expect(payload.message).toBe('blocked') - expect(payload.warnings).toEqual([]) - expect(payload.errors).toEqual([ - expect.objectContaining({ - code: 'MOCK_FAILURE', - title: 'Mock command failed', - rootCause: ['The mock command was forced to fail for JSON output testing.'], - copyText: expect.arrayContaining(['[MOCK_FAILURE] Mock command failed']) - }) - ]) - } - finally { - writeSpy.mockRestore() - } - }) - - it('includes workspace empty directories in clean dry-run results', async () => { - const workspaceDir = path.resolve('tmp-workspace-command-dry-run-empty') - const generatedDir = path.join(workspaceDir, 'generated') - const generatedFile = path.join(generatedDir, 'AGENTS.md') - const emptyLeafDir = path.join(workspaceDir, 'scratch', 'empty', 'leaf') - const retainedScratchFile = path.join(workspaceDir, 'scratch', 'keep.md') - const plugin: OutputPlugin = { - type: PluginKind.Output, - name: 'DryRunEmptyDirPlugin', - log: createMockLogger(), - declarativeOutput: true, - outputCapabilities: {}, - async declareOutputFiles() { - return [{path: generatedFile, source: {}}] - }, - async declareCleanupPaths() { - return {} - }, - async convertContent() { - return '' - } - } - - fs.rmSync(workspaceDir, {recursive: true, force: true}) - fs.mkdirSync(generatedDir, {recursive: true}) - fs.mkdirSync(emptyLeafDir, {recursive: true}) - fs.mkdirSync(path.dirname(retainedScratchFile), {recursive: true}) - fs.writeFileSync(generatedFile, '# generated', 'utf8') - fs.writeFileSync(retainedScratchFile, '# keep', 'utf8') - - try { - const ctx = createCommandContext([plugin], workspaceDir) - const result = await new DryRunCleanCommand().execute(ctx) - - expect(result).toEqual(expect.objectContaining({ - success: true, - filesAffected: 1, - dirsAffected: 3 - })) - } - finally { - fs.rmSync(workspaceDir, {recursive: true, force: true}) - } - }) -}) diff --git a/sdk/src/commands/UnknownCommand.ts b/sdk/src/commands/UnknownCommand.ts deleted file mode 100644 index 7a530f42..00000000 --- a/sdk/src/commands/UnknownCommand.ts +++ /dev/null @@ -1,34 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {buildUsageDiagnostic, diagnosticLines} from '@/diagnostics' - -/** - * Unknown command - displays error for unrecognized subcommands - */ -export class UnknownCommand implements Command { - readonly name = 'unknown' - - constructor(private readonly unknownCmd: string) { } - - async execute(ctx: CommandContext): Promise { - ctx.logger.error(buildUsageDiagnostic({ - code: 'UNKNOWN_COMMAND', - title: `Unknown tnmsc command: ${this.unknownCmd}`, - rootCause: diagnosticLines(`tnmsc does not recognize the "${this.unknownCmd}" subcommand.`), - exactFix: diagnosticLines('Run `tnmsc help` and invoke one of the supported commands.'), - possibleFixes: [ - diagnosticLines('Check the command spelling and remove unsupported aliases or flags.') - ], - details: { - command: this.unknownCmd - } - })) - ctx.logger.info('run "tnmsc help" for available commands') - - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: `Unknown command: ${this.unknownCmd}` - } - } -} diff --git a/sdk/src/commands/bridge.rs b/sdk/src/commands/bridge.rs deleted file mode 100644 index d3d18de0..00000000 --- a/sdk/src/commands/bridge.rs +++ /dev/null @@ -1,23 +0,0 @@ -use std::process::ExitCode; - -use crate::bridge::node::run_node_command; - -pub fn execute(json_mode: bool) -> ExitCode { - run_node_command("execute", json_mode, &[]) -} - -pub fn dry_run(json_mode: bool) -> ExitCode { - run_node_command("dry-run", json_mode, &[]) -} - -pub fn clean(json_mode: bool) -> ExitCode { - run_node_command("clean", json_mode, &[]) -} - -pub fn dry_run_clean(json_mode: bool) -> ExitCode { - run_node_command("clean", json_mode, &["--dry-run"]) -} - -pub fn plugins(json_mode: bool) -> ExitCode { - run_node_command("plugins", json_mode, &[]) -} diff --git a/sdk/src/commands/config_cmd.rs b/sdk/src/commands/config_cmd.rs deleted file mode 100644 index e7eb62b5..00000000 --- a/sdk/src/commands/config_cmd.rs +++ /dev/null @@ -1,108 +0,0 @@ -use std::process::ExitCode; - -use crate::diagnostic_helpers::{diagnostic, line, optional_details}; -use serde_json::json; -use tnmsc_logger::create_logger; - -use crate::core::config::{ConfigLoader, get_required_global_config_path}; - -pub fn execute(pairs: &[(String, String)]) -> ExitCode { - let logger = create_logger("config", None); - let result = match ConfigLoader::with_defaults().try_load(std::path::Path::new(".")) { - Ok(result) => result, - Err(error) => { - logger.error(diagnostic( - "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", - "Failed to resolve the global config path", - line("The runtime could not determine which global config file should be updated."), - Some(line( - "Ensure the required global config exists and retry the command.", - )), - None, - optional_details(json!({ "error": error })), - )); - return ExitCode::FAILURE; - } - }; - let mut config = result.config; - - for (key, value) in pairs { - match key.as_str() { - "workspaceDir" => config.workspace_dir = Some(value.clone()), - "logLevel" => config.log_level = Some(value.clone()), - _ => { - logger.warn(diagnostic( - "CONFIG_KEY_UNKNOWN", - "Unknown config key was ignored", - line("The provided config key is not supported by this command."), - Some(line( - "Use one of the supported keys: `workspaceDir`, `logLevel`.", - )), - None, - optional_details(json!({ "key": key })), - )); - } - } - } - - let config_path = match get_required_global_config_path() { - Ok(path) => path, - Err(error) => { - logger.error(diagnostic( - "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", - "Failed to resolve the global config path", - line("The runtime could not determine which global config file should be written."), - Some(line( - "Ensure the required global config exists and retry the command.", - )), - None, - optional_details(json!({ "error": error })), - )); - return ExitCode::FAILURE; - } - }; - match serde_json::to_string_pretty(&config) { - Ok(json) => { - if let Some(parent) = config_path.parent() { - let _ = std::fs::create_dir_all(parent); - } - match std::fs::write(&config_path, &json) { - Ok(()) => { - logger.info( - serde_json::Value::String(format!( - "Config saved to {}", - config_path.display() - )), - None, - ); - ExitCode::SUCCESS - } - Err(e) => { - logger.error(diagnostic( - "CONFIG_WRITE_FAILED", - "Failed to write the global config file", - line("The CLI generated the config JSON but could not write it to disk."), - Some(line("Check that the config path is writable and retry.")), - None, - optional_details(json!({ - "path": config_path.to_string_lossy(), - "error": e.to_string() - })), - )); - ExitCode::FAILURE - } - } - } - Err(e) => { - logger.error(diagnostic( - "CONFIG_SERIALIZATION_FAILED", - "Failed to serialize the config", - line("The config object could not be converted to JSON."), - None, - None, - optional_details(json!({ "error": e.to_string() })), - )); - ExitCode::FAILURE - } - } -} diff --git a/sdk/src/commands/config_show.rs b/sdk/src/commands/config_show.rs deleted file mode 100644 index 0c9be861..00000000 --- a/sdk/src/commands/config_show.rs +++ /dev/null @@ -1,44 +0,0 @@ -use std::process::ExitCode; - -use crate::diagnostic_helpers::{diagnostic, line, optional_details}; -use serde_json::json; -use tnmsc_logger::create_logger; - -use crate::core::config::ConfigLoader; - -pub fn execute() -> ExitCode { - let logger = create_logger("config-show", None); - let result = match ConfigLoader::with_defaults().try_load(std::path::Path::new(".")) { - Ok(result) => result, - Err(error) => { - logger.error(diagnostic( - "GLOBAL_CONFIG_PATH_RESOLUTION_FAILED", - "Failed to resolve the global config path", - line("The runtime could not determine which global config file should be shown."), - Some(line( - "Ensure the required global config exists and retry the command.", - )), - None, - optional_details(json!({ "error": error })), - )); - return ExitCode::FAILURE; - } - }; - match serde_json::to_string_pretty(&result.config) { - Ok(json) => { - println!("{json}"); - ExitCode::SUCCESS - } - Err(e) => { - logger.error(diagnostic( - "CONFIG_SERIALIZATION_FAILED", - "Failed to serialize the config", - line("The merged config could not be converted to JSON for display."), - None, - None, - optional_details(json!({ "error": e.to_string() })), - )); - ExitCode::FAILURE - } - } -} diff --git a/sdk/src/commands/factories/ExecuteCommandFactory.ts b/sdk/src/commands/factories/ExecuteCommandFactory.ts deleted file mode 100644 index d7a6f8dc..00000000 --- a/sdk/src/commands/factories/ExecuteCommandFactory.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {ExecuteCommand} from '../ExecuteCommand' - -/** - * Factory for creating ExecuteCommand (default command) - * Handles default execution when no specific subcommand matches - */ -export class ExecuteCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { // This is a catch-all factory with lowest priority - void args - return true - } - - createCommand(args: ParsedCliArgs): Command { - void args - return new ExecuteCommand() - } -} diff --git a/sdk/src/config.plugins-fast-path.test.ts b/sdk/src/config.plugins-fast-path.test.ts index 6dc21219..5ca909e6 100644 --- a/sdk/src/config.plugins-fast-path.test.ts +++ b/sdk/src/config.plugins-fast-path.test.ts @@ -31,7 +31,7 @@ describe('defineConfig plugins fast path', () => { try { const result = await defineConfig({ loadUserConfig: false, - pipelineArgs: ['node', 'tnmsc', 'plugins', '--json'], + runtimeCommand: 'plugins', pluginOptions: { workspaceDir: tempWorkspace, plugins: [] @@ -42,8 +42,7 @@ describe('defineConfig plugins fast path', () => { expect(result.context.workspace.directory.path).toBe(tempWorkspace) expect(result.context.aindexDir).toBe(path.join(tempWorkspace, 'aindex')) expect(result.outputPlugins).toEqual([]) - } - finally { + } finally { fs.rmSync(tempWorkspace, {recursive: true, force: true}) } }) diff --git a/sdk/src/config.test.ts b/sdk/src/config.test.ts index 2b94ef42..578bc82f 100644 --- a/sdk/src/config.test.ts +++ b/sdk/src/config.test.ts @@ -32,23 +32,27 @@ describe('defineConfig', () => { delete process.env.HOMEPATH fs.mkdirSync(globalConfigDir, {recursive: true}) - fs.writeFileSync(globalConfigPath, JSON.stringify({ - workspaceDir: tempWorkspace, - aindex: { - dir: 'aindex', - skills: {src: 'skills', dist: 'dist/skills'}, - commands: {src: 'commands', dist: 'dist/commands'}, - subAgents: {src: 'subagents', dist: 'dist/subagents'}, - rules: {src: 'rules', dist: 'dist/rules'}, - globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, - workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, - app: {src: 'app', dist: 'dist/app'}, - ext: {src: 'ext', dist: 'dist/ext'}, - arch: {src: 'arch', dist: 'dist/arch'}, - softwares: {src: 'softwares', dist: 'dist/softwares'} - }, - logLevel: 'info' - }), 'utf8') + fs.writeFileSync( + globalConfigPath, + JSON.stringify({ + workspaceDir: tempWorkspace, + aindex: { + dir: 'aindex', + skills: {src: 'skills', dist: 'dist/skills'}, + commands: {src: 'commands', dist: 'dist/commands'}, + subAgents: {src: 'subagents', dist: 'dist/subagents'}, + rules: {src: 'rules', dist: 'dist/rules'}, + globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, + workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, + app: {src: 'app', dist: 'dist/app'}, + ext: {src: 'ext', dist: 'dist/ext'}, + arch: {src: 'arch', dist: 'dist/arch'}, + softwares: {src: 'softwares', dist: 'dist/softwares'} + }, + logLevel: 'info' + }), + 'utf8' + ) fs.writeFileSync(localConfigPath, JSON.stringify({workspaceDir: '/wrong/workspace', logLevel: 'error'}), 'utf8') try { @@ -58,8 +62,7 @@ describe('defineConfig', () => { expect(result.userConfigOptions.aindex.softwares).toEqual({src: 'softwares', dist: 'dist/softwares'}) expect(result.context.workspace.directory.path).toBe(tempWorkspace) expect(result.context.aindexDir).toBe(path.join(tempWorkspace, 'aindex')) - } - finally { + } finally { fs.rmSync(tempWorkspace, {recursive: true, force: true}) fs.rmSync(tempHome, {recursive: true, force: true}) } @@ -72,27 +75,25 @@ describe('defineConfig', () => { fs.mkdirSync(path.join(publicDir, 'execute'), {recursive: true}) fs.mkdirSync(path.join(publicDir, 'dry-run'), {recursive: true}) - fs.writeFileSync(path.join(publicDir, 'proxy.ts'), [ - 'export default (_logicalPath, ctx) => ctx.command === "dry-run"', - ' ? "dry-run/gitignore"', - ' : "execute/gitignore"', - '' - ].join('\n'), 'utf8') + fs.writeFileSync( + path.join(publicDir, 'proxy.ts'), + ['export default (_logicalPath, ctx) => ctx.command === "dry-run"', ' ? "dry-run/gitignore"', ' : "execute/gitignore"', ''].join('\n'), + 'utf8' + ) fs.writeFileSync(path.join(publicDir, 'execute', 'gitignore'), 'execute\n', 'utf8') fs.writeFileSync(path.join(publicDir, 'dry-run', 'gitignore'), 'dry-run\n', 'utf8') try { const result = await defineConfig({ loadUserConfig: false, - pipelineArgs: ['node', 'tnmsc', 'dry-run'], + runtimeCommand: 'dry-run', pluginOptions: { workspaceDir: tempWorkspace } }) expect(result.context.globalGitIgnore).toBe('dry-run\n') - } - finally { + } finally { fs.rmSync(tempWorkspace, {recursive: true, force: true}) } }) @@ -116,8 +117,7 @@ describe('defineConfig', () => { expect(result.context.workspace.directory.path).toBe(tempWorkspace) expect(fs.existsSync(orphanSkillFile)).toBe(true) - } - finally { + } finally { fs.rmSync(tempWorkspace, {recursive: true, force: true}) } }) @@ -144,8 +144,7 @@ describe('defineConfig', () => { expect(result.context.workspace.directory.path).toBe(tempWorkspace) expect(fs.existsSync(orphanSkillFile)).toBe(true) - } - finally { + } finally { fs.rmSync(tempWorkspace, {recursive: true, force: true}) fs.rmSync(tempHome, {recursive: true, force: true}) } @@ -165,8 +164,7 @@ describe('defineConfig', () => { expect(result.context.workspace.directory.path).toBe(tempWorkspace) expect(result.outputPlugins).toEqual([]) - } - finally { + } finally { fs.rmSync(tempWorkspace, {recursive: true, force: true}) } }) diff --git a/sdk/src/config.ts b/sdk/src/config.ts index eff2e696..f19f27d1 100644 --- a/sdk/src/config.ts +++ b/sdk/src/config.ts @@ -14,11 +14,12 @@ import type { UserConfigFile, WindowsOptions } from './plugins/plugin-core' +import type {RuntimeCommand} from './runtime-command' import * as path from 'node:path' import {createLogger} from '@truenine/logger' import {checkVersionControl} from './Aindex' import {getConfigLoader} from './ConfigLoader' -import {collectInputContext, resolveRuntimeCommand} from './inputs/runtime' +import {collectInputContext} from './inputs/runtime' import { buildDefaultAindexConfig, FilePathKind, @@ -100,7 +101,7 @@ export interface DefineConfigOptions { readonly cwd?: string - readonly pipelineArgs?: readonly string[] + readonly runtimeCommand?: RuntimeCommand } /** @@ -108,20 +109,12 @@ export interface DefineConfigOptions { * Later options override earlier ones. * Similar to vite/vitest mergeConfig. */ -export function mergeConfig( - ...configs: Partial[] -): Required { +export function mergeConfig(...configs: Partial[]): Required { const initialConfig: Required = {...DEFAULT_OPTIONS} - return configs.reduce( - (acc: Required, config) => mergeTwoConfigs(acc, config), - initialConfig - ) + return configs.reduce((acc: Required, config) => mergeTwoConfigs(acc, config), initialConfig) } -function mergeTwoConfigs( - base: Required, - override: Partial -): Required { +function mergeTwoConfigs(base: Required, override: Partial): Required { const overridePlugins = override.plugins const overrideCommandSeries = override.commandSeriesOptions const overrideOutputScopes = override.outputScopes @@ -133,7 +126,8 @@ function mergeTwoConfigs( ...base, ...override, aindex: mergeAindexConfig(base.aindex, override.aindex), - plugins: [ // Array concatenation for plugins + plugins: [ + // Array concatenation for plugins ...base.plugins, ...overridePlugins ?? [] ], @@ -145,20 +139,19 @@ function mergeTwoConfigs( } } -function mergeCommandSeriesOptions( - base?: CommandSeriesOptions, - override?: CommandSeriesOptions -): CommandSeriesOptions { +function mergeCommandSeriesOptions(base?: CommandSeriesOptions, override?: CommandSeriesOptions): CommandSeriesOptions { if (override == null) return base ?? {} if (base == null) return override const mergedPluginOverrides: Record = {} // Merge pluginOverrides deeply - if (base.pluginOverrides != null) { // Copy base plugin overrides + if (base.pluginOverrides != null) { + // Copy base plugin overrides for (const [key, value] of Object.entries(base.pluginOverrides)) mergedPluginOverrides[key] = {...value} } - if (override.pluginOverrides != null) { // Merge override plugin overrides + if (override.pluginOverrides != null) { + // Merge override plugin overrides for (const [key, value] of Object.entries(override.pluginOverrides)) { mergedPluginOverrides[key] = { ...mergedPluginOverrides[key], @@ -176,20 +169,14 @@ function mergeCommandSeriesOptions( return {} } -function mergeOutputScopeTopics( - base?: PluginOutputScopeTopics, - override?: PluginOutputScopeTopics -): PluginOutputScopeTopics | undefined { +function mergeOutputScopeTopics(base?: PluginOutputScopeTopics, override?: PluginOutputScopeTopics): PluginOutputScopeTopics | undefined { if (base == null && override == null) return void 0 if (base == null) return override if (override == null) return base return {...base, ...override} } -function mergeOutputScopeOptions( - base?: OutputScopeOptions, - override?: OutputScopeOptions -): OutputScopeOptions { +function mergeOutputScopeOptions(base?: OutputScopeOptions, override?: OutputScopeOptions): OutputScopeOptions { if (override == null) return base ?? {} if (base == null) return override @@ -221,25 +208,16 @@ function mergeFrontMatterOptions( } } -function mergeCleanupProtectionOptions( - base?: CleanupProtectionOptions, - override?: CleanupProtectionOptions -): CleanupProtectionOptions { +function mergeCleanupProtectionOptions(base?: CleanupProtectionOptions, override?: CleanupProtectionOptions): CleanupProtectionOptions { if (override == null) return base ?? {} if (base == null) return override return { - rules: [ - ...base.rules ?? [], - ...override.rules ?? [] - ] + rules: [...base.rules ?? [], ...override.rules ?? []] } } -function mergeWindowsOptions( - base?: WindowsOptions, - override?: WindowsOptions -): WindowsOptions { +function mergeWindowsOptions(base?: WindowsOptions, override?: WindowsOptions): WindowsOptions { if (override == null) return base ?? {} if (base == null) return override @@ -264,16 +242,10 @@ function mergeWindowsOptions( * Check if options is DefineConfigOptions */ function isDefineConfigOptions(options: PluginOptions | DefineConfigOptions): options is DefineConfigOptions { - return 'pluginOptions' in options - || 'configLoaderOptions' in options - || 'loadUserConfig' in options - || 'cwd' in options - || 'pipelineArgs' in options + return 'pluginOptions' in options || 'configLoaderOptions' in options || 'loadUserConfig' in options || 'cwd' in options || 'runtimeCommand' in options } -function getProgrammaticPluginDeclaration( - options: PluginOptions | DefineConfigOptions -): { +function getProgrammaticPluginDeclaration(options: PluginOptions | DefineConfigOptions): { readonly hasExplicitProgrammaticPlugins: boolean readonly explicitProgrammaticPlugins?: PluginOptions['plugins'] } { @@ -300,9 +272,7 @@ function resolvePathForMinimalContext(rawPath: string, workspaceDir: string): st return path.normalize(resolveUserPath(resolvedPath)) } -function createMinimalOutputCollectedContext( - options: Required -): OutputCollectedContext { +function createMinimalOutputCollectedContext(options: Required): OutputCollectedContext { const workspaceDir = resolvePathForMinimalContext(options.workspaceDir, '') const aindexDir = path.join(workspaceDir, options.aindex.dir) @@ -319,15 +289,13 @@ function createMinimalOutputCollectedContext( }) } -function shouldUsePluginsFastPath(pipelineArgs?: readonly string[]): boolean { - return resolveRuntimeCommand(pipelineArgs) === 'plugins' +function shouldUsePluginsFastPath(runtimeCommand?: RuntimeCommand): boolean { + return runtimeCommand === 'plugins' } -async function resolvePluginSetup( - options: PluginOptions | DefineConfigOptions = {} -): Promise< +async function resolvePluginSetup(options: PluginOptions | DefineConfigOptions = {}): Promise< ResolvedPluginSetup & { - readonly pipelineArgs?: readonly string[] + readonly runtimeCommand?: RuntimeCommand readonly userConfigFound: boolean readonly userConfigSources: readonly string[] } @@ -336,26 +304,26 @@ async function resolvePluginSetup( cwd: string | undefined, pluginOptions: PluginOptions, configLoaderOptions: ConfigLoaderOptions | undefined, - pipelineArgs: readonly string[] | undefined + runtimeCommand: RuntimeCommand | undefined if (isDefineConfigOptions(options)) { ({ pluginOptions = {}, cwd, configLoaderOptions, - pipelineArgs + runtimeCommand } = { pluginOptions: options.pluginOptions, cwd: options.cwd, configLoaderOptions: options.configLoaderOptions, - pipelineArgs: options.pipelineArgs + runtimeCommand: options.runtimeCommand }) shouldLoadUserConfig = options.loadUserConfig ?? true } else { pluginOptions = options shouldLoadUserConfig = true configLoaderOptions = void 0 - pipelineArgs = void 0 + runtimeCommand = void 0 } let userConfigOptions: Partial = {} @@ -372,8 +340,7 @@ async function resolvePluginSetup( userConfigOptions = userConfigToPluginOptions(userConfigResult.config) userConfigFile = userConfigResult.config } - } - catch (error) { + } catch (error) { const errorMessage = error instanceof Error ? error.message : String(error) throw new Error(`Failed to load user config: ${errorMessage}`) } @@ -402,7 +369,7 @@ async function resolvePluginSetup( outputPlugins, inputCapabilities, ...userConfigFile != null && {userConfigFile}, - ...pipelineArgs != null && {pipelineArgs}, + ...runtimeCommand != null && {runtimeCommand}, userConfigFound, userConfigSources } @@ -419,20 +386,11 @@ async function resolvePluginSetup( * @param options - Plugin options or DefineConfigOptions */ export async function defineConfig(options: PluginOptions | DefineConfigOptions = {}): Promise { - const { - hasExplicitProgrammaticPlugins, - explicitProgrammaticPlugins - } = getProgrammaticPluginDeclaration(options) - const { - mergedOptions, - outputPlugins, - inputCapabilities, - userConfigFile, - pipelineArgs - } = await resolvePluginSetup(options) + const {hasExplicitProgrammaticPlugins, explicitProgrammaticPlugins} = getProgrammaticPluginDeclaration(options) + const {mergedOptions, outputPlugins, inputCapabilities, userConfigFile, runtimeCommand} = await resolvePluginSetup(options) const logger = createLogger('defineConfig', mergedOptions.logLevel) - if (shouldUsePluginsFastPath(pipelineArgs)) { + if (shouldUsePluginsFastPath(runtimeCommand)) { const context = createMinimalOutputCollectedContext(mergedOptions) return {context, outputPlugins, userConfigOptions: mergedOptions} } @@ -441,7 +399,7 @@ export async function defineConfig(options: PluginOptions | DefineConfigOptions userConfigOptions: mergedOptions, ...inputCapabilities.length > 0 ? {capabilities: inputCapabilities} : {}, includeBuiltinEffects: !(inputCapabilities.length > 0 || (hasExplicitProgrammaticPlugins && (explicitProgrammaticPlugins?.length ?? 0) === 0)), - ...pipelineArgs != null ? {pipelineArgs} : {}, + ...runtimeCommand != null ? {runtimeCommand} : {}, ...userConfigFile != null ? {userConfig: userConfigFile} : {} }) diff --git a/sdk/src/index.test.ts b/sdk/src/index.test.ts index 0727ccea..76abfdce 100644 --- a/sdk/src/index.test.ts +++ b/sdk/src/index.test.ts @@ -4,8 +4,8 @@ describe('library entrypoint', () => { it('can be imported without executing the CLI runtime', async () => { const mod = await import('./index') - expect(typeof mod.runCli).toBe('function') - expect(typeof mod.createDefaultPluginConfig).toBe('function') expect(typeof mod.listPrompts).toBe('function') + expect(typeof mod.defineConfig).toBe('function') + expect(typeof mod.performCleanup).toBe('function') }) }) diff --git a/sdk/src/index.ts b/sdk/src/index.ts index 4b8dd474..12529d9d 100644 --- a/sdk/src/index.ts +++ b/sdk/src/index.ts @@ -1,14 +1,44 @@ export * from './Aindex' -export * from './cli-runtime' export * from './config' export * from './ConfigLoader' -export { - createDefaultPluginConfig -} from './plugin.config' -export * from './PluginPipeline' +export * from './diagnostics' +export * from './pipeline/OutputRuntimeTargets' +export * from './plugins/plugin-agentskills-compact' +export * from './plugins/plugin-agentsmd' + +export * from './plugins/plugin-claude-code-cli' + export { DEFAULT_USER_CONFIG, PathPlaceholders } from './plugins/plugin-core' - +export * from './plugins/plugin-core' +export * from './plugins/plugin-cursor' +export * from './plugins/plugin-droid-cli' +export * from './plugins/plugin-editorconfig' +export * from './plugins/plugin-gemini-cli' +export * from './plugins/plugin-git-exclude' +export * from './plugins/plugin-jetbrains-ai-codex' +export * from './plugins/plugin-jetbrains-codestyle' +export * from './plugins/plugin-openai-codex-cli' +export * from './plugins/plugin-opencode-cli' +export * from './plugins/plugin-qoder-ide' +export * from './plugins/plugin-readme' +export * from './plugins/plugin-trae-cn-ide' +export * from './plugins/plugin-trae-ide' +export * from './plugins/plugin-vscode' +export * from './plugins/plugin-warp-ide' +export * from './plugins/plugin-windsurf' +export * from './plugins/plugin-zed' export * from './prompts' + +export { + logProtectedDeletionGuardError +} from './ProtectedDeletionGuard' +export * from './runtime-command' +export { + getRequiredGlobalConfigPath, + resolveUserPath +} from './runtime-environment' +export * from './runtime/cleanup' +export * from './wsl-mirror-sync' diff --git a/sdk/src/inputs/runtime.ts b/sdk/src/inputs/runtime.ts index 710fe2f8..83377e1f 100644 --- a/sdk/src/inputs/runtime.ts +++ b/sdk/src/inputs/runtime.ts @@ -1,11 +1,6 @@ import type {MdxGlobalScope} from '@truenine/md-compiler/globals' -import type { - InputCapability, - InputCapabilityContext, - InputCollectedContext, - PluginOptions, - UserConfigFile -} from '@/plugins/plugin-core' +import type {InputCapability, InputCapabilityContext, InputCollectedContext, PluginOptions, UserConfigFile} from '@/plugins/plugin-core' +import type {RuntimeCommand} from '@/runtime-command' import * as fs from 'node:fs' import * as path from 'node:path' @@ -32,13 +27,12 @@ import { WorkspaceInputCapability, ZedConfigInputCapability } from '@/inputs' -import {extractUserArgs, parseArgs} from '@/pipeline/CliArgumentParser' import {buildDependencyContext, mergeContexts} from '@/pipeline/ContextMerger' import {topologicalSort} from '@/pipeline/DependencyResolver' import {GlobalScopeCollector, ScopePriority, ScopeRegistry} from '@/plugins/plugin-core/GlobalScopeCollector' export interface InputRuntimeOptions { - readonly pipelineArgs?: readonly string[] + readonly runtimeCommand?: RuntimeCommand readonly userConfigOptions: Required readonly userConfig?: UserConfigFile readonly capabilities?: readonly InputCapability[] @@ -46,11 +40,7 @@ export interface InputRuntimeOptions { } function createBuiltinInputEffectCapabilities(): InputCapability[] { - return [ - new SkillDistCleanupEffectInputCapability(), - new OrphanFileCleanupEffectInputCapability(), - new MarkdownWhitespaceCleanupEffectInputCapability() - ] + return [new SkillDistCleanupEffectInputCapability(), new OrphanFileCleanupEffectInputCapability(), new MarkdownWhitespaceCleanupEffectInputCapability()] } function createBuiltinInputReaderCapabilities(): InputCapability[] { @@ -74,35 +64,9 @@ function createBuiltinInputReaderCapabilities(): InputCapability[] { ] } -export function resolveRuntimeCommand( - pipelineArgs?: readonly string[] -): InputCapabilityContext['runtimeCommand'] { - if (pipelineArgs == null || pipelineArgs.length === 0) return 'execute' - - const filteredArgs = pipelineArgs.filter((arg): arg is string => arg != null) - const userArgs = extractUserArgs(filteredArgs) - const args = parseArgs(userArgs) - - if (args.helpFlag || args.versionFlag || args.unknownCommand != null) return void 0 - if (args.subcommand === 'clean') return 'clean' - if (args.subcommand === 'plugins') return 'plugins' - if (args.subcommand === 'dry-run' || args.dryRun) return 'dry-run' - if (args.subcommand == null) return 'execute' - return void 0 -} - -export async function collectInputContext( - options: InputRuntimeOptions -): Promise> { - const { - pipelineArgs, - userConfigOptions, - userConfig, - capabilities, - includeBuiltinEffects = true - } = options +export async function collectInputContext(options: InputRuntimeOptions): Promise> { + const {runtimeCommand, userConfigOptions, userConfig, capabilities, includeBuiltinEffects = true} = options const logger = createLogger('InputRuntime', userConfigOptions.logLevel) - const runtimeCommand = resolveRuntimeCommand(pipelineArgs) const baseCtx: Omit = { logger, userConfigOptions, diff --git a/sdk/src/lib.rs b/sdk/src/lib.rs index e2d9bf5b..49f0797c 100644 --- a/sdk/src/lib.rs +++ b/sdk/src/lib.rs @@ -4,7 +4,6 @@ //! Bridge commands (Node.js): run_bridge_command pub mod bridge; -pub mod commands; pub mod core; pub(crate) mod diagnostic_helpers; @@ -67,6 +66,33 @@ pub fn config_show(cwd: &Path) -> Result { serde_json::to_string_pretty(&result.config).map_err(CliError::from) } +/// Update the canonical global config from key/value pairs and return the saved path. +pub fn update_global_config_from_pairs( + cwd: &Path, + pairs: &[(String, String)], +) -> Result { + let result = core::config::ConfigLoader::with_defaults() + .try_load(cwd) + .map_err(CliError::ConfigError)?; + let mut config = result.config; + + for (key, value) in pairs { + match key.as_str() { + "workspaceDir" => config.workspace_dir = Some(value.clone()), + "logLevel" => config.log_level = Some(value.clone()), + _ => {} + } + } + + let config_path = core::config::get_required_global_config_path().map_err(CliError::ConfigError)?; + if let Some(parent) = config_path.parent() { + std::fs::create_dir_all(parent)?; + } + let json = serde_json::to_string_pretty(&config)?; + std::fs::write(&config_path, &json)?; + Ok(config_path) +} + /// Execute a bridge command (execute, dry-run, clean, plugins) via Node.js subprocess. /// /// The subprocess output is captured (piped) and returned as a [`BridgeCommandResult`]. diff --git a/sdk/src/plugin.config.ts b/sdk/src/plugin.config.ts deleted file mode 100644 index 8d0dd887..00000000 --- a/sdk/src/plugin.config.ts +++ /dev/null @@ -1,58 +0,0 @@ -import type {PipelineConfig} from '@/config' -import process from 'node:process' -import {GenericSkillsOutputPlugin} from '@truenine/plugin-agentskills-compact' -import {AgentsOutputPlugin} from '@truenine/plugin-agentsmd' -import {ClaudeCodeCLIOutputPlugin} from '@truenine/plugin-claude-code-cli' -import {CursorOutputPlugin} from '@truenine/plugin-cursor' -import {DroidCLIOutputPlugin} from '@truenine/plugin-droid-cli' -import {EditorConfigOutputPlugin} from '@truenine/plugin-editorconfig' -import {GeminiCLIOutputPlugin} from '@truenine/plugin-gemini-cli' -import {GitExcludeOutputPlugin} from '@truenine/plugin-git-exclude' -import {JetBrainsAIAssistantCodexOutputPlugin} from '@truenine/plugin-jetbrains-ai-codex' -import {JetBrainsIDECodeStyleConfigOutputPlugin} from '@truenine/plugin-jetbrains-codestyle' -import {CodexCLIOutputPlugin} from '@truenine/plugin-openai-codex-cli' -import {OpencodeCLIOutputPlugin} from '@truenine/plugin-opencode-cli' -import {QoderIDEPluginOutputPlugin} from '@truenine/plugin-qoder-ide' -import {ReadmeMdConfigFileOutputPlugin} from '@truenine/plugin-readme' -import {TraeIDEOutputPlugin} from '@truenine/plugin-trae-ide' -import {VisualStudioCodeIDEConfigOutputPlugin} from '@truenine/plugin-vscode' -import {WarpIDEOutputPlugin} from '@truenine/plugin-warp-ide' -import {WindsurfOutputPlugin} from '@truenine/plugin-windsurf' -import {ZedIDEConfigOutputPlugin} from '@truenine/plugin-zed' -import {defineConfig} from '@/config' -import {TraeCNIDEOutputPlugin} from '@/plugins/plugin-trae-cn-ide' - -export async function createDefaultPluginConfig( - pipelineArgs: readonly string[] = process.argv -): Promise { - return defineConfig({ - pipelineArgs, - pluginOptions: { - plugins: [ - new AgentsOutputPlugin(), - new ClaudeCodeCLIOutputPlugin(), - new CodexCLIOutputPlugin(), - new JetBrainsAIAssistantCodexOutputPlugin(), - new DroidCLIOutputPlugin(), - new GeminiCLIOutputPlugin(), - new GenericSkillsOutputPlugin(), - new OpencodeCLIOutputPlugin(), - new QoderIDEPluginOutputPlugin(), - new TraeIDEOutputPlugin(), - new TraeCNIDEOutputPlugin(), - new WarpIDEOutputPlugin(), - new WindsurfOutputPlugin(), - new CursorOutputPlugin(), - new GitExcludeOutputPlugin(), - - new JetBrainsIDECodeStyleConfigOutputPlugin(), - new EditorConfigOutputPlugin(), - new VisualStudioCodeIDEConfigOutputPlugin(), - new ZedIDEConfigOutputPlugin(), - new ReadmeMdConfigFileOutputPlugin() - ] - } - }) -} - -export default createDefaultPluginConfig diff --git a/sdk/src/plugins/plugin-core/plugin.ts b/sdk/src/plugins/plugin-core/plugin.ts index 9bed4e00..6572f661 100644 --- a/sdk/src/plugins/plugin-core/plugin.ts +++ b/sdk/src/plugins/plugin-core/plugin.ts @@ -12,11 +12,8 @@ import type { WindowsOptions } from './ConfigTypes.schema' import type {PluginKind} from './enums' -import type { - InputCollectedContext, - OutputCollectedContext, - Project -} from './InputTypes' +import type {InputCollectedContext, OutputCollectedContext, Project} from './InputTypes' +import type {RuntimeCommand} from '@/runtime-command' import {Buffer} from 'node:buffer' import * as fs from 'node:fs' import * as path from 'node:path' @@ -51,7 +48,7 @@ export interface PluginContext { export interface InputCapabilityContext extends PluginContext { readonly userConfigOptions: Required readonly dependencyContext: Partial - readonly runtimeCommand?: 'execute' | 'dry-run' | 'clean' | 'plugins' + readonly runtimeCommand?: RuntimeCommand readonly globalScope?: MdxGlobalScope @@ -336,10 +333,7 @@ function normalizeScopeSelection(selection: OutputScopeSelection): readonly Outp return unique } -function getPluginScopeOverrides( - pluginName: string, - pluginOptions?: PluginOptions -): PluginOutputScopeTopics | undefined { +function getPluginScopeOverrides(pluginName: string, pluginOptions?: PluginOptions): PluginOutputScopeTopics | undefined { return pluginOptions?.outputScopes?.plugins?.[pluginName] } @@ -351,10 +345,7 @@ export function validateOutputPluginCapabilities(plugin: OutputPlugin): void { } } -export function validateOutputScopeOverridesForPlugin( - plugin: OutputPlugin, - pluginOptions?: PluginOptions -): void { +export function validateOutputScopeOverridesForPlugin(plugin: OutputPlugin, pluginOptions?: PluginOptions): void { const overrides = getPluginScopeOverrides(plugin.name, pluginOptions) if (overrides == null) return @@ -390,10 +381,7 @@ export function validateOutputScopeOverridesForPlugin( } } -export function validateOutputScopeOverridesForPlugins( - plugins: readonly OutputPlugin[], - pluginOptions?: PluginOptions -): void { +export function validateOutputScopeOverridesForPlugins(plugins: readonly OutputPlugin[], pluginOptions?: PluginOptions): void { for (const plugin of plugins) { validateOutputPluginCapabilities(plugin) validateOutputScopeOverridesForPlugin(plugin, pluginOptions) @@ -406,9 +394,7 @@ export async function collectOutputDeclarations( ): Promise> { validateOutputScopeOverridesForPlugins(plugins, ctx.pluginOptions) - const declarationEntries = await Promise.all( - plugins.map(async plugin => [plugin, await plugin.declareOutputFiles(ctx)] as const) - ) + const declarationEntries = await Promise.all(plugins.map(async plugin => [plugin, await plugin.declareOutputFiles(ctx)] as const)) return new Map(declarationEntries) } @@ -447,12 +433,9 @@ export async function executeDeclarativeWriteOutputs( if (declaration.ifExists === 'error' && fs.existsSync(declaration.path)) throw new Error(`Refusing to overwrite existing file: ${declaration.path}`) const content = await plugin.convertContent(declaration, ctx) - isNodeBufferLike(content) - ? fs.writeFileSync(declaration.path, content) - : fs.writeFileSync(declaration.path, content, 'utf8') + isNodeBufferLike(content) ? fs.writeFileSync(declaration.path, content) : fs.writeFileSync(declaration.path, content, 'utf8') fileResults.push({path: declaration.path, success: true}) - } - catch (error) { + } catch (error) { fileResults.push({path: declaration.path, success: false, error: error as Error}) } } @@ -489,12 +472,10 @@ export async function collectAllPluginOutputs( const globalDirs: string[] = [] const globalFiles: string[] = [] - const declarationGroups = predeclaredOutputs != null - ? [...predeclaredOutputs.values()] - : Array.from( - await collectOutputDeclarations(plugins, {...ctx, dryRun: true}), - ([, declarations]) => declarations - ) + const declarationGroups + = predeclaredOutputs != null + ? [...predeclaredOutputs.values()] + : Array.from(await collectOutputDeclarations(plugins, {...ctx, dryRun: true}), ([, declarations]) => declarations) for (const declarations of declarationGroups) { for (const declaration of declarations) { diff --git a/sdk/src/runtime-command.ts b/sdk/src/runtime-command.ts new file mode 100644 index 00000000..91b6f01e --- /dev/null +++ b/sdk/src/runtime-command.ts @@ -0,0 +1 @@ +export type RuntimeCommand = 'execute' | 'dry-run' | 'clean' | 'plugins' diff --git a/sdk/src/runtime/cleanup.ts b/sdk/src/runtime/cleanup.ts new file mode 100644 index 00000000..12d66c3d --- /dev/null +++ b/sdk/src/runtime/cleanup.ts @@ -0,0 +1,461 @@ +import type { + ILogger, + OutputCleanContext, + OutputCleanupDeclarations, + OutputCleanupPathDeclaration, + OutputFileDeclaration, + OutputPlugin, + PluginOptions +} from '../plugins/plugin-core' +import type {ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard' +import {buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines} from '@/diagnostics' +import {loadAindexProjectConfig} from '../aindex-config/AindexProjectConfigLoader' +import {getNativeBinding} from '../core/native-binding' +import {collectAllPluginOutputs} from '../plugins/plugin-core' +import { + collectConfiguredAindexInputRules, + collectProjectRoots, + collectProtectedInputSourceRules, + logProtectedDeletionGuardError +} from '../ProtectedDeletionGuard' + +let nativeCleanupBindingCheck: boolean | null = null + +export interface CleanupResult { + readonly deletedFiles: number + readonly deletedDirs: number + readonly errors: readonly CleanupError[] + readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[] + readonly conflicts: readonly CleanupProtectionConflict[] + readonly message?: string +} + +export interface CleanupError { + readonly path: string + readonly type: 'file' | 'directory' + readonly error: unknown +} + +export interface CleanupProtectionConflict { + readonly outputPath: string + readonly outputPlugin: string + readonly protectedPath: string + readonly protectionMode: ProtectionMode + readonly protectedBy: string + readonly reason: string +} + +export class CleanupProtectionConflictError extends Error { + readonly conflicts: readonly CleanupProtectionConflict[] + + constructor(conflicts: readonly CleanupProtectionConflict[]) { + super(buildCleanupProtectionConflictMessage(conflicts)) + this.name = 'CleanupProtectionConflictError' + this.conflicts = conflicts + } +} + +interface NativeCleanupBinding { + readonly planCleanup?: (snapshotJson: string) => string | Promise + readonly performCleanup?: (snapshotJson: string) => string | Promise +} + +type NativeProtectionMode = 'direct' | 'recursive' +type NativeProtectionRuleMatcher = 'path' | 'glob' +type NativeCleanupTargetKind = 'file' | 'directory' | 'glob' +type NativeCleanupErrorKind = 'file' | 'directory' + +interface NativeCleanupTarget { + readonly path: string + readonly kind: NativeCleanupTargetKind + readonly excludeBasenames?: readonly string[] + readonly protectionMode?: NativeProtectionMode + readonly scope?: string + readonly label?: string +} + +interface NativeCleanupDeclarations { + readonly delete?: readonly NativeCleanupTarget[] + readonly protect?: readonly NativeCleanupTarget[] + readonly excludeScanGlobs?: readonly string[] +} + +interface NativePluginCleanupSnapshot { + readonly pluginName: string + readonly outputs: readonly string[] + readonly cleanup: NativeCleanupDeclarations +} + +interface NativeProtectedRule { + readonly path: string + readonly protectionMode: NativeProtectionMode + readonly reason: string + readonly source: string + readonly matcher?: NativeProtectionRuleMatcher | undefined +} + +interface NativeCleanupSnapshot { + readonly workspaceDir: string + readonly aindexDir?: string + readonly projectRoots: readonly string[] + readonly protectedRules: readonly NativeProtectedRule[] + readonly pluginSnapshots: readonly NativePluginCleanupSnapshot[] + readonly emptyDirExcludeGlobs?: readonly string[] +} + +interface NativeProtectedPathViolation { + readonly targetPath: string + readonly protectedPath: string + readonly protectionMode: NativeProtectionMode + readonly reason: string + readonly source: string +} + +interface NativeCleanupProtectionConflict { + readonly outputPath: string + readonly outputPlugin: string + readonly protectedPath: string + readonly protectionMode: NativeProtectionMode + readonly protectedBy: string + readonly reason: string +} + +interface NativeCleanupPlan { + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly emptyDirsToDelete: string[] + readonly violations: readonly NativeProtectedPathViolation[] + readonly conflicts: readonly NativeCleanupProtectionConflict[] + readonly excludedScanGlobs: string[] +} + +interface NativeCleanupError { + readonly path: string + readonly kind: NativeCleanupErrorKind + readonly error: string +} + +interface NativeCleanupResult { + readonly deletedFiles: number + readonly deletedDirs: number + readonly errors: readonly NativeCleanupError[] + readonly violations: readonly NativeProtectedPathViolation[] + readonly conflicts: readonly NativeCleanupProtectionConflict[] + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly emptyDirsToDelete: string[] + readonly excludedScanGlobs: string[] +} + +export function hasNativeCleanupBinding(): boolean { + if (nativeCleanupBindingCheck !== null) { + return nativeCleanupBindingCheck + } + const nativeBinding = getNativeBinding() + nativeCleanupBindingCheck = nativeBinding?.planCleanup != null && nativeBinding.performCleanup != null + return nativeCleanupBindingCheck +} + +function requireNativeCleanupBinding(): NativeCleanupBinding { + const nativeBinding = getNativeBinding() + if (nativeBinding == null) { + throw new Error('Native cleanup binding is required. Build or install the Rust NAPI package before running tnmsc.') + } + return nativeBinding +} + +function mapProtectionMode(mode: ProtectionMode): NativeProtectionMode { + return mode +} + +function mapProtectionRuleMatcher(matcher: ProtectionRuleMatcher | undefined): NativeProtectionRuleMatcher | undefined { + return matcher +} + +function mapCleanupTarget(target: OutputCleanupPathDeclaration): NativeCleanupTarget { + return { + path: target.path, + kind: target.kind, + ...target.excludeBasenames != null && target.excludeBasenames.length > 0 ? {excludeBasenames: [...target.excludeBasenames]} : {}, + ...target.protectionMode != null ? {protectionMode: mapProtectionMode(target.protectionMode)} : {}, + ...target.scope != null ? {scope: target.scope} : {}, + ...target.label != null ? {label: target.label} : {} + } +} + +async function collectPluginCleanupDeclarations(plugin: OutputPlugin, cleanCtx: OutputCleanContext): Promise { + if (plugin.declareCleanupPaths == null) return {} + return plugin.declareCleanupPaths({...cleanCtx, dryRun: true}) +} + +async function collectPluginCleanupSnapshot( + plugin: OutputPlugin, + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise { + const existingOutputDeclarations = predeclaredOutputs?.get(plugin) + const [outputs, cleanup] = await Promise.all([ + existingOutputDeclarations != null ? Promise.resolve(existingOutputDeclarations) : plugin.declareOutputFiles({...cleanCtx, dryRun: true}), + collectPluginCleanupDeclarations(plugin, cleanCtx) + ]) + + return { + pluginName: plugin.name, + outputs: outputs.map(output => output.path), + cleanup: { + ...cleanup.delete != null && cleanup.delete.length > 0 ? {delete: cleanup.delete.map(mapCleanupTarget)} : {}, + ...cleanup.protect != null && cleanup.protect.length > 0 ? {protect: cleanup.protect.map(mapCleanupTarget)} : {}, + ...cleanup.excludeScanGlobs != null && cleanup.excludeScanGlobs.length > 0 ? {excludeScanGlobs: [...cleanup.excludeScanGlobs]} : {} + } + } +} + +function collectConfiguredCleanupProtectionRules(cleanCtx: OutputCleanContext): NativeProtectedRule[] { + return (cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []).map(rule => ({ + path: rule.path, + protectionMode: mapProtectionMode(rule.protectionMode), + reason: rule.reason ?? 'configured cleanup protection rule', + source: 'configured-cleanup-protection', + matcher: mapProtectionRuleMatcher(rule.matcher ?? 'path') + })) +} + +function buildCleanupProtectionConflictMessage(conflicts: readonly NativeCleanupProtectionConflict[]): string { + const pathList = conflicts.map(conflict => conflict.outputPath).join(', ') + return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}` +} + +function logCleanupProtectionConflicts(logger: ILogger, conflicts: readonly NativeCleanupProtectionConflict[]): void { + const firstConflict = conflicts[0] + + logger.error( + buildDiagnostic({ + code: 'CLEANUP_PROTECTION_CONFLICT_DETECTED', + title: 'Cleanup output paths conflict with protected inputs', + rootCause: diagnosticLines( + `tnmsc found ${conflicts.length} output path(s) that also match protected cleanup rules.`, + firstConflict == null + ? 'No conflict details were captured.' + : `Example conflict: "${firstConflict.outputPath}" is protected by "${firstConflict.protectedPath}".` + ), + exactFix: diagnosticLines('Separate generated output paths from protected source or reserved workspace paths before running cleanup again.'), + possibleFixes: [ + diagnosticLines('Update cleanup protect declarations so they do not overlap generated outputs.'), + diagnosticLines('Move the conflicting output target to a generated-only directory.') + ], + details: { + count: conflicts.length, + conflicts + } + }) + ) +} + +function logCleanupPlanDiagnostics( + logger: ILogger, + plan: Pick< + NativeCleanupPlan | NativeCleanupResult, + 'filesToDelete' | 'dirsToDelete' | 'emptyDirsToDelete' | 'violations' | 'conflicts' | 'excludedScanGlobs' + > +): void { + logger.debug('cleanup plan built', { + filesToDelete: plan.filesToDelete.length, + dirsToDelete: plan.dirsToDelete.length + plan.emptyDirsToDelete.length, + emptyDirsToDelete: plan.emptyDirsToDelete.length, + violations: plan.violations.length, + conflicts: plan.conflicts.length, + excludedScanGlobs: plan.excludedScanGlobs + }) +} + +function logNativeCleanupErrors( + logger: ILogger, + errors: readonly NativeCleanupError[] +): readonly {path: string, type: 'file' | 'directory', error: string}[] { + return errors.map(currentError => { + const type = currentError.kind === 'directory' ? 'directory' : 'file' + logger.warn( + buildFileOperationDiagnostic({ + code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED', + title: type === 'file' ? 'Cleanup could not delete a file' : 'Cleanup could not delete a directory', + operation: 'delete', + targetKind: type, + path: currentError.path, + error: currentError.error, + details: { + phase: 'cleanup' + } + }) + ) + + return {path: currentError.path, type, error: currentError.error} + }) +} + +async function buildCleanupSnapshot( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise { + const pluginSnapshots = await Promise.all(outputPlugins.map(async plugin => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs))) + + const protectedRules: NativeProtectedRule[] = [] + for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) { + protectedRules.push({ + path: rule.path, + protectionMode: mapProtectionMode(rule.protectionMode), + reason: rule.reason, + source: rule.source, + ...rule.matcher != null ? {matcher: mapProtectionRuleMatcher(rule.matcher)} : {} + }) + } + + if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) { + for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, { + workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path + })) { + protectedRules.push({ + path: rule.path, + protectionMode: mapProtectionMode(rule.protectionMode), + reason: rule.reason, + source: rule.source, + ...rule.matcher != null ? {matcher: mapProtectionRuleMatcher(rule.matcher)} : {} + }) + } + } + + protectedRules.push(...collectConfiguredCleanupProtectionRules(cleanCtx)) + + let emptyDirExcludeGlobs: string[] | undefined + if (cleanCtx.collectedOutputContext.aindexDir != null) { + const aindexConfig = await loadAindexProjectConfig(cleanCtx.collectedOutputContext.aindexDir) + if (aindexConfig.found) { + const exclude = aindexConfig.config.emptyDirCleanup?.exclude + if (exclude != null && exclude.length > 0) { + emptyDirExcludeGlobs = [...exclude] + } + } + } + + return { + workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, + ...cleanCtx.collectedOutputContext.aindexDir != null ? {aindexDir: cleanCtx.collectedOutputContext.aindexDir} : {}, + projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext), + protectedRules, + pluginSnapshots, + ...emptyDirExcludeGlobs != null && emptyDirExcludeGlobs.length > 0 ? {emptyDirExcludeGlobs} : {} + } +} + +function parseNativeJson(json: string): T { + return JSON.parse(json) as T +} + +export async function planCleanupWithNative(snapshot: NativeCleanupSnapshot): Promise { + const nativeBinding = requireNativeCleanupBinding() + if (nativeBinding?.planCleanup == null) throw new Error('Native cleanup planning is unavailable') + const result = await Promise.resolve(nativeBinding.planCleanup(JSON.stringify(snapshot))) + return parseNativeJson(result) +} + +export async function performCleanupWithNative(snapshot: NativeCleanupSnapshot): Promise { + const nativeBinding = requireNativeCleanupBinding() + if (nativeBinding?.performCleanup == null) throw new Error('Native cleanup execution is unavailable') + const result = await Promise.resolve(nativeBinding.performCleanup(JSON.stringify(snapshot))) + return parseNativeJson(result) +} + +export async function collectDeletionTargets( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise<{ + filesToDelete: string[] + dirsToDelete: string[] + emptyDirsToDelete: string[] + violations: import('../ProtectedDeletionGuard').ProtectedPathViolation[] + conflicts: CleanupProtectionConflict[] + excludedScanGlobs: string[] +}> { + const snapshot = await buildCleanupSnapshot(outputPlugins, cleanCtx, predeclaredOutputs) + const plan = await planCleanupWithNative(snapshot) + + if (plan.conflicts.length > 0) { + throw new CleanupProtectionConflictError(plan.conflicts) + } + + return { + filesToDelete: plan.filesToDelete, + dirsToDelete: plan.dirsToDelete.sort((a, b) => a.localeCompare(b)), + emptyDirsToDelete: plan.emptyDirsToDelete.sort((a, b) => a.localeCompare(b)), + violations: [...plan.violations], + conflicts: [], + excludedScanGlobs: plan.excludedScanGlobs + } +} + +export async function performCleanup( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + logger: ILogger, + predeclaredOutputs?: ReadonlyMap +): Promise { + if (predeclaredOutputs != null) { + const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx, predeclaredOutputs) + logger.debug('Collected outputs for cleanup', { + projectDirs: outputs.projectDirs.length, + projectFiles: outputs.projectFiles.length, + globalDirs: outputs.globalDirs.length, + globalFiles: outputs.globalFiles.length + }) + } + + const snapshot = await buildCleanupSnapshot(outputPlugins, cleanCtx, predeclaredOutputs) + const result = await performCleanupWithNative(snapshot) + + logCleanupPlanDiagnostics(logger, result) + + if (result.conflicts.length > 0) { + logCleanupProtectionConflicts(logger, result.conflicts) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: [], + conflicts: result.conflicts, + message: buildCleanupProtectionConflictMessage(result.conflicts) + } + } + + if (result.violations.length > 0) { + logProtectedDeletionGuardError(logger, 'cleanup', result.violations) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: result.violations, + conflicts: [], + message: `Protected deletion guard blocked cleanup for ${result.violations.length} path(s)` + } + } + + logger.debug('cleanup delete execution started', { + filesToDelete: result.filesToDelete.length, + dirsToDelete: result.dirsToDelete.length + result.emptyDirsToDelete.length, + emptyDirsToDelete: result.emptyDirsToDelete.length + }) + const loggedErrors = logNativeCleanupErrors(logger, result.errors) + logger.debug('cleanup delete execution complete', { + deletedFiles: result.deletedFiles, + deletedDirs: result.deletedDirs, + errors: loggedErrors.length + }) + + return { + deletedFiles: result.deletedFiles, + deletedDirs: result.deletedDirs + result.emptyDirsToDelete.length, + errors: loggedErrors, + violations: [], + conflicts: [] + } +} diff --git a/sdk/tsdown.config.ts b/sdk/tsdown.config.ts index 183d9c5c..fa041e92 100644 --- a/sdk/tsdown.config.ts +++ b/sdk/tsdown.config.ts @@ -49,7 +49,7 @@ const pluginAliases: Record = { '@truenine/plugin-zed': resolve('src/plugins/plugin-zed.ts') } -const noExternalDeps = [ +const alwaysBundleDeps = [ '@truenine/logger', '@truenine/script-runtime', 'fast-glob', @@ -66,13 +66,13 @@ export default defineConfig([ sourcemap: false, unbundle: false, deps: { + alwaysBundle: alwaysBundleDeps, onlyBundle: false }, alias: { '@': resolve('src'), ...pluginAliases }, - noExternal: noExternalDeps, format: ['esm'], minify: true, dts: {sourcemap: false}, @@ -83,50 +83,6 @@ export default defineConfig([ __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry } }, - { - entry: ['./src/plugin-runtime.ts'], - platform: 'node', - sourcemap: false, - unbundle: false, - deps: { - onlyBundle: false - }, - alias: { - '@': resolve('src'), - ...pluginAliases - }, - noExternal: noExternalDeps, - format: ['esm'], - minify: true, - dts: false, - define: { - __CLI_VERSION__: JSON.stringify(pkg.version), - __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), - __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry - } - }, - { - entry: ['./src/script-runtime-worker.ts'], - platform: 'node', - sourcemap: false, - unbundle: false, - deps: { - onlyBundle: false - }, - alias: { - '@': resolve('src'), - ...pluginAliases - }, - noExternal: noExternalDeps, - format: ['esm'], - minify: false, - dts: false, - define: { - __CLI_VERSION__: JSON.stringify(pkg.version), - __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), - __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry - } - }, { entry: ['./src/globals.ts'], platform: 'node', From 9056679d2f1799f281cd5a2112ea9cac79f0f02e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Tue, 31 Mar 2026 12:49:05 +0800 Subject: [PATCH 15/27] refactor: remove noop files and update package.json scripts for platform packages --- cli/npm/.gitignore | 2 -- cli/npm/darwin-arm64/noop.cjs | 28 ---------------------------- cli/npm/darwin-arm64/noop.d.ts | 8 -------- cli/npm/darwin-arm64/package.json | 3 +++ cli/npm/darwin-x64/noop.cjs | 28 ---------------------------- cli/npm/darwin-x64/noop.d.ts | 8 -------- cli/npm/darwin-x64/package.json | 3 +++ cli/npm/linux-arm64-gnu/noop.cjs | 28 ---------------------------- cli/npm/linux-arm64-gnu/noop.d.ts | 8 -------- cli/npm/linux-arm64-gnu/package.json | 3 +++ cli/npm/linux-x64-gnu/noop.cjs | 28 ---------------------------- cli/npm/linux-x64-gnu/noop.d.ts | 8 -------- cli/npm/linux-x64-gnu/package.json | 3 +++ cli/npm/win32-x64-msvc/noop.cjs | 28 ---------------------------- cli/npm/win32-x64-msvc/noop.d.ts | 8 -------- cli/npm/win32-x64-msvc/package.json | 3 +++ 16 files changed, 15 insertions(+), 182 deletions(-) delete mode 100644 cli/npm/darwin-arm64/noop.cjs delete mode 100644 cli/npm/darwin-arm64/noop.d.ts delete mode 100644 cli/npm/darwin-x64/noop.cjs delete mode 100644 cli/npm/darwin-x64/noop.d.ts delete mode 100644 cli/npm/linux-arm64-gnu/noop.cjs delete mode 100644 cli/npm/linux-arm64-gnu/noop.d.ts delete mode 100644 cli/npm/linux-x64-gnu/noop.cjs delete mode 100644 cli/npm/linux-x64-gnu/noop.d.ts delete mode 100644 cli/npm/win32-x64-msvc/noop.cjs delete mode 100644 cli/npm/win32-x64-msvc/noop.d.ts diff --git a/cli/npm/.gitignore b/cli/npm/.gitignore index d98927ce..a85693ee 100644 --- a/cli/npm/.gitignore +++ b/cli/npm/.gitignore @@ -2,5 +2,3 @@ !.gitignore !*/ !*/package.json -!*/noop.cjs -!*/noop.d.ts diff --git a/cli/npm/darwin-arm64/noop.cjs b/cli/npm/darwin-arm64/noop.cjs deleted file mode 100644 index 84c0933b..00000000 --- a/cli/npm/darwin-arm64/noop.cjs +++ /dev/null @@ -1,28 +0,0 @@ -'use strict' - -const {readdirSync} = require('node:fs') -const {join} = require('node:path') - -const EXPORT_BINDINGS = [ - ['logger', 'napi-logger.'], - ['mdCompiler', 'napi-md-compiler.'], - ['scriptRuntime', 'napi-script-runtime.'], - ['config', 'napi-memory-sync-cli.'] -] - -const nodeFiles = readdirSync(__dirname).filter(file => file.endsWith('.node')) -const bindings = {} - -for (const [exportName, prefix] of EXPORT_BINDINGS) { - const file = nodeFiles.find(candidate => candidate.startsWith(prefix)) - if (file == null) continue - - Object.defineProperty(bindings, exportName, { - enumerable: true, - get() { - return require(join(__dirname, file)) - } - }) -} - -module.exports = bindings diff --git a/cli/npm/darwin-arm64/noop.d.ts b/cli/npm/darwin-arm64/noop.d.ts deleted file mode 100644 index 667d20dc..00000000 --- a/cli/npm/darwin-arm64/noop.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -declare const bindings: { - readonly logger?: unknown - readonly mdCompiler?: unknown - readonly scriptRuntime?: unknown - readonly config?: unknown -} - -export = bindings diff --git a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json index e3cde4bc..4af896a7 100644 --- a/cli/npm/darwin-arm64/package.json +++ b/cli/npm/darwin-arm64/package.json @@ -8,6 +8,9 @@ "arm64" ], "license": "AGPL-3.0-only", + "scripts": { + "prepack": "node ../../../scripts/write-platform-package-shims.mjs ." + }, "main": "noop.cjs", "types": "noop.d.ts", "files": [ diff --git a/cli/npm/darwin-x64/noop.cjs b/cli/npm/darwin-x64/noop.cjs deleted file mode 100644 index 84c0933b..00000000 --- a/cli/npm/darwin-x64/noop.cjs +++ /dev/null @@ -1,28 +0,0 @@ -'use strict' - -const {readdirSync} = require('node:fs') -const {join} = require('node:path') - -const EXPORT_BINDINGS = [ - ['logger', 'napi-logger.'], - ['mdCompiler', 'napi-md-compiler.'], - ['scriptRuntime', 'napi-script-runtime.'], - ['config', 'napi-memory-sync-cli.'] -] - -const nodeFiles = readdirSync(__dirname).filter(file => file.endsWith('.node')) -const bindings = {} - -for (const [exportName, prefix] of EXPORT_BINDINGS) { - const file = nodeFiles.find(candidate => candidate.startsWith(prefix)) - if (file == null) continue - - Object.defineProperty(bindings, exportName, { - enumerable: true, - get() { - return require(join(__dirname, file)) - } - }) -} - -module.exports = bindings diff --git a/cli/npm/darwin-x64/noop.d.ts b/cli/npm/darwin-x64/noop.d.ts deleted file mode 100644 index 667d20dc..00000000 --- a/cli/npm/darwin-x64/noop.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -declare const bindings: { - readonly logger?: unknown - readonly mdCompiler?: unknown - readonly scriptRuntime?: unknown - readonly config?: unknown -} - -export = bindings diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json index da2cf2a0..a6bc2521 100644 --- a/cli/npm/darwin-x64/package.json +++ b/cli/npm/darwin-x64/package.json @@ -8,6 +8,9 @@ "x64" ], "license": "AGPL-3.0-only", + "scripts": { + "prepack": "node ../../../scripts/write-platform-package-shims.mjs ." + }, "main": "noop.cjs", "types": "noop.d.ts", "files": [ diff --git a/cli/npm/linux-arm64-gnu/noop.cjs b/cli/npm/linux-arm64-gnu/noop.cjs deleted file mode 100644 index 84c0933b..00000000 --- a/cli/npm/linux-arm64-gnu/noop.cjs +++ /dev/null @@ -1,28 +0,0 @@ -'use strict' - -const {readdirSync} = require('node:fs') -const {join} = require('node:path') - -const EXPORT_BINDINGS = [ - ['logger', 'napi-logger.'], - ['mdCompiler', 'napi-md-compiler.'], - ['scriptRuntime', 'napi-script-runtime.'], - ['config', 'napi-memory-sync-cli.'] -] - -const nodeFiles = readdirSync(__dirname).filter(file => file.endsWith('.node')) -const bindings = {} - -for (const [exportName, prefix] of EXPORT_BINDINGS) { - const file = nodeFiles.find(candidate => candidate.startsWith(prefix)) - if (file == null) continue - - Object.defineProperty(bindings, exportName, { - enumerable: true, - get() { - return require(join(__dirname, file)) - } - }) -} - -module.exports = bindings diff --git a/cli/npm/linux-arm64-gnu/noop.d.ts b/cli/npm/linux-arm64-gnu/noop.d.ts deleted file mode 100644 index 667d20dc..00000000 --- a/cli/npm/linux-arm64-gnu/noop.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -declare const bindings: { - readonly logger?: unknown - readonly mdCompiler?: unknown - readonly scriptRuntime?: unknown - readonly config?: unknown -} - -export = bindings diff --git a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json index 646ca5fc..70eaccbf 100644 --- a/cli/npm/linux-arm64-gnu/package.json +++ b/cli/npm/linux-arm64-gnu/package.json @@ -8,6 +8,9 @@ "arm64" ], "license": "AGPL-3.0-only", + "scripts": { + "prepack": "node ../../../scripts/write-platform-package-shims.mjs ." + }, "main": "noop.cjs", "types": "noop.d.ts", "files": [ diff --git a/cli/npm/linux-x64-gnu/noop.cjs b/cli/npm/linux-x64-gnu/noop.cjs deleted file mode 100644 index 84c0933b..00000000 --- a/cli/npm/linux-x64-gnu/noop.cjs +++ /dev/null @@ -1,28 +0,0 @@ -'use strict' - -const {readdirSync} = require('node:fs') -const {join} = require('node:path') - -const EXPORT_BINDINGS = [ - ['logger', 'napi-logger.'], - ['mdCompiler', 'napi-md-compiler.'], - ['scriptRuntime', 'napi-script-runtime.'], - ['config', 'napi-memory-sync-cli.'] -] - -const nodeFiles = readdirSync(__dirname).filter(file => file.endsWith('.node')) -const bindings = {} - -for (const [exportName, prefix] of EXPORT_BINDINGS) { - const file = nodeFiles.find(candidate => candidate.startsWith(prefix)) - if (file == null) continue - - Object.defineProperty(bindings, exportName, { - enumerable: true, - get() { - return require(join(__dirname, file)) - } - }) -} - -module.exports = bindings diff --git a/cli/npm/linux-x64-gnu/noop.d.ts b/cli/npm/linux-x64-gnu/noop.d.ts deleted file mode 100644 index 667d20dc..00000000 --- a/cli/npm/linux-x64-gnu/noop.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -declare const bindings: { - readonly logger?: unknown - readonly mdCompiler?: unknown - readonly scriptRuntime?: unknown - readonly config?: unknown -} - -export = bindings diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json index 1a584e6c..98ef04f5 100644 --- a/cli/npm/linux-x64-gnu/package.json +++ b/cli/npm/linux-x64-gnu/package.json @@ -8,6 +8,9 @@ "x64" ], "license": "AGPL-3.0-only", + "scripts": { + "prepack": "node ../../../scripts/write-platform-package-shims.mjs ." + }, "main": "noop.cjs", "types": "noop.d.ts", "files": [ diff --git a/cli/npm/win32-x64-msvc/noop.cjs b/cli/npm/win32-x64-msvc/noop.cjs deleted file mode 100644 index 84c0933b..00000000 --- a/cli/npm/win32-x64-msvc/noop.cjs +++ /dev/null @@ -1,28 +0,0 @@ -'use strict' - -const {readdirSync} = require('node:fs') -const {join} = require('node:path') - -const EXPORT_BINDINGS = [ - ['logger', 'napi-logger.'], - ['mdCompiler', 'napi-md-compiler.'], - ['scriptRuntime', 'napi-script-runtime.'], - ['config', 'napi-memory-sync-cli.'] -] - -const nodeFiles = readdirSync(__dirname).filter(file => file.endsWith('.node')) -const bindings = {} - -for (const [exportName, prefix] of EXPORT_BINDINGS) { - const file = nodeFiles.find(candidate => candidate.startsWith(prefix)) - if (file == null) continue - - Object.defineProperty(bindings, exportName, { - enumerable: true, - get() { - return require(join(__dirname, file)) - } - }) -} - -module.exports = bindings diff --git a/cli/npm/win32-x64-msvc/noop.d.ts b/cli/npm/win32-x64-msvc/noop.d.ts deleted file mode 100644 index 667d20dc..00000000 --- a/cli/npm/win32-x64-msvc/noop.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -declare const bindings: { - readonly logger?: unknown - readonly mdCompiler?: unknown - readonly scriptRuntime?: unknown - readonly config?: unknown -} - -export = bindings diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json index abba1832..c8234e2b 100644 --- a/cli/npm/win32-x64-msvc/package.json +++ b/cli/npm/win32-x64-msvc/package.json @@ -8,6 +8,9 @@ "x64" ], "license": "AGPL-3.0-only", + "scripts": { + "prepack": "node ../../../scripts/write-platform-package-shims.mjs ." + }, "main": "noop.cjs", "types": "noop.d.ts", "files": [ From a0bcba608137bcb8b6c2e36fb48adae7e60cd2fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Tue, 31 Mar 2026 16:00:04 +0800 Subject: [PATCH 16/27] refactor: update package.json scripts and file exports for platform packages --- cli/eslint.config.ts | 4 ++-- cli/npm/darwin-arm64/package.json | 16 +++++++++++----- cli/npm/darwin-x64/package.json | 16 +++++++++++----- cli/npm/linux-arm64-gnu/package.json | 16 +++++++++++----- cli/npm/linux-x64-gnu/package.json | 16 +++++++++++----- cli/npm/win32-x64-msvc/package.json | 16 +++++++++++----- 6 files changed, 57 insertions(+), 27 deletions(-) diff --git a/cli/eslint.config.ts b/cli/eslint.config.ts index 9c891393..e6caf052 100644 --- a/cli/eslint.config.ts +++ b/cli/eslint.config.ts @@ -17,8 +17,8 @@ const config = await eslint10({ ignores: [ '.turbo/**', 'aindex/**', - 'npm/**/noop.cjs', - 'npm/**/noop.d.ts', + 'npm/**/noop.mjs', + 'npm/**/noop.d.mts', '*.md', '**/*.md', '*.toml', diff --git a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json index 4af896a7..1b807c39 100644 --- a/cli/npm/darwin-arm64/package.json +++ b/cli/npm/darwin-arm64/package.json @@ -9,13 +9,19 @@ ], "license": "AGPL-3.0-only", "scripts": { - "prepack": "node ../../../scripts/write-platform-package-shims.mjs ." + "prepack": "pnpm exec tsx ../../../scripts/write-platform-package-shims.ts ." + }, + "exports": { + ".": { + "types": "./noop.d.mts", + "import": "./noop.mjs" + }, + "./*.node": "./*.node", + "./package.json": "./package.json" }, - "main": "noop.cjs", - "types": "noop.d.ts", "files": [ "*.node", - "noop.cjs", - "noop.d.ts" + "noop.mjs", + "noop.d.mts" ] } diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json index a6bc2521..c954526d 100644 --- a/cli/npm/darwin-x64/package.json +++ b/cli/npm/darwin-x64/package.json @@ -9,13 +9,19 @@ ], "license": "AGPL-3.0-only", "scripts": { - "prepack": "node ../../../scripts/write-platform-package-shims.mjs ." + "prepack": "pnpm exec tsx ../../../scripts/write-platform-package-shims.ts ." + }, + "exports": { + ".": { + "types": "./noop.d.mts", + "import": "./noop.mjs" + }, + "./*.node": "./*.node", + "./package.json": "./package.json" }, - "main": "noop.cjs", - "types": "noop.d.ts", "files": [ "*.node", - "noop.cjs", - "noop.d.ts" + "noop.mjs", + "noop.d.mts" ] } diff --git a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json index 70eaccbf..e4fd843d 100644 --- a/cli/npm/linux-arm64-gnu/package.json +++ b/cli/npm/linux-arm64-gnu/package.json @@ -9,13 +9,19 @@ ], "license": "AGPL-3.0-only", "scripts": { - "prepack": "node ../../../scripts/write-platform-package-shims.mjs ." + "prepack": "pnpm exec tsx ../../../scripts/write-platform-package-shims.ts ." + }, + "exports": { + ".": { + "types": "./noop.d.mts", + "import": "./noop.mjs" + }, + "./*.node": "./*.node", + "./package.json": "./package.json" }, - "main": "noop.cjs", - "types": "noop.d.ts", "files": [ "*.node", - "noop.cjs", - "noop.d.ts" + "noop.mjs", + "noop.d.mts" ] } diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json index 98ef04f5..64b02a95 100644 --- a/cli/npm/linux-x64-gnu/package.json +++ b/cli/npm/linux-x64-gnu/package.json @@ -9,13 +9,19 @@ ], "license": "AGPL-3.0-only", "scripts": { - "prepack": "node ../../../scripts/write-platform-package-shims.mjs ." + "prepack": "pnpm exec tsx ../../../scripts/write-platform-package-shims.ts ." + }, + "exports": { + ".": { + "types": "./noop.d.mts", + "import": "./noop.mjs" + }, + "./*.node": "./*.node", + "./package.json": "./package.json" }, - "main": "noop.cjs", - "types": "noop.d.ts", "files": [ "*.node", - "noop.cjs", - "noop.d.ts" + "noop.mjs", + "noop.d.mts" ] } diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json index c8234e2b..62160174 100644 --- a/cli/npm/win32-x64-msvc/package.json +++ b/cli/npm/win32-x64-msvc/package.json @@ -9,13 +9,19 @@ ], "license": "AGPL-3.0-only", "scripts": { - "prepack": "node ../../../scripts/write-platform-package-shims.mjs ." + "prepack": "pnpm exec tsx ../../../scripts/write-platform-package-shims.ts ." + }, + "exports": { + ".": { + "types": "./noop.d.mts", + "import": "./noop.mjs" + }, + "./*.node": "./*.node", + "./package.json": "./package.json" }, - "main": "noop.cjs", - "types": "noop.d.ts", "files": [ "*.node", - "noop.cjs", - "noop.d.ts" + "noop.mjs", + "noop.d.mts" ] } From 777f0c5f6ddedf05cb922e97ec213f22674e9f22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Tue, 31 Mar 2026 16:00:23 +0800 Subject: [PATCH 17/27] chore: update dependencies in pnpm-workspace.yaml - bump @modelcontextprotocol/sdk from ^1.28.0 to ^1.29.0 - bump @tanstack/react-router from ^1.168.8 to ^1.168.10 - bump @tanstack/router-generator from ^1.166.22 to ^1.166.24 - bump @tanstack/router-plugin from ^1.167.9 to ^1.167.12 - bump material-icon-theme from ^5.32.0 to ^5.33.1 - bump turbo from ^2.8.21 to ^2.9.1 - bump typescript-eslint from ^8.57.2 to ^8.58.0 --- pnpm-lock.yaml | 435 ++++++++++++++++++++++---------------------- pnpm-workspace.yaml | 14 +- 2 files changed, 226 insertions(+), 223 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 14fed2c5..6d854837 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -16,8 +16,8 @@ catalogs: specifier: ^10.0.1 version: 10.0.1 '@modelcontextprotocol/sdk': - specifier: ^1.28.0 - version: 1.28.0 + specifier: ^1.29.0 + version: 1.29.0 '@monaco-editor/react': specifier: ^4.7.0 version: 4.7.0 @@ -31,14 +31,14 @@ catalogs: specifier: ^4.2.2 version: 4.2.2 '@tanstack/react-router': - specifier: ^1.168.8 - version: 1.168.8 + specifier: ^1.168.10 + version: 1.168.10 '@tanstack/router-generator': - specifier: ^1.166.22 - version: 1.166.22 + specifier: ^1.166.24 + version: 1.166.24 '@tanstack/router-plugin': - specifier: ^1.167.9 - version: 1.167.9 + specifier: ^1.167.12 + version: 1.167.12 '@tauri-apps/api': specifier: ^2.10.1 version: 2.10.1 @@ -136,8 +136,8 @@ catalogs: specifier: ^1.7.0 version: 1.7.0 material-icon-theme: - specifier: ^5.32.0 - version: 5.32.0 + specifier: ^5.33.1 + version: 5.33.1 mdast-util-mdx: specifier: ^3.0.0 version: 3.0.0 @@ -208,8 +208,8 @@ catalogs: specifier: ^4.21.0 version: 4.21.0 turbo: - specifier: ^2.8.21 - version: 2.8.21 + specifier: ^2.9.1 + version: 2.9.1 tw-animate-css: specifier: ^1.4.0 version: 1.4.0 @@ -217,8 +217,8 @@ catalogs: specifier: 6.0.2 version: 6.0.2 typescript-eslint: - specifier: ^8.57.2 - version: 8.57.2 + specifier: ^8.58.0 + version: 8.58.0 unified: specifier: ^11.0.5 version: 11.0.5 @@ -244,7 +244,7 @@ importers: devDependencies: '@antfu/eslint-config': specifier: 'catalog:' - version: 7.7.3(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@typescript-eslint/typescript-estree@8.57.2(typescript@6.0.2))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@unocss/eslint-plugin@66.6.7(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@vue/compiler-sfc@3.5.26)(eslint-plugin-format@2.0.1(eslint@10.1.0(jiti@2.6.1)))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))) + version: 7.7.3(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@typescript-eslint/typescript-estree@8.58.0(typescript@6.0.2))(@typescript-eslint/utils@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@unocss/eslint-plugin@66.6.7(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@vue/compiler-sfc@3.5.26)(eslint-plugin-format@2.0.1(eslint@10.1.0(jiti@2.6.1)))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))) '@eslint/js': specifier: 'catalog:' version: 10.0.1(eslint@10.1.0(jiti@2.6.1)) @@ -256,7 +256,7 @@ importers: version: 16.2.1 '@truenine/eslint10-config': specifier: 'catalog:' - version: 2026.10326.11646(3147283cf2a5f1693493984073c80bb9) + version: 2026.10326.11646(8cca449280c02e5bb3a580f8f930062e) '@types/node': specifier: 'catalog:' version: 25.5.0 @@ -268,7 +268,7 @@ importers: version: 10.2.0(eslint@10.1.0(jiti@2.6.1))(prettier@3.8.1) '@vue/eslint-config-typescript': specifier: 'catalog:' - version: 14.7.0(eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + version: 14.7.0(eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) eslint: specifier: 'catalog:' version: 10.1.0(jiti@2.6.1) @@ -280,7 +280,7 @@ importers: version: 5.5.5(eslint-config-prettier@10.1.8(eslint@10.1.0(jiti@2.6.1)))(eslint@10.1.0(jiti@2.6.1))(prettier@3.8.1) eslint-plugin-vue: specifier: 'catalog:' - version: 10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))) + version: 10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))) fast-check: specifier: 'catalog:' version: 4.6.0 @@ -301,13 +301,13 @@ importers: version: 4.21.0 turbo: specifier: 'catalog:' - version: 2.8.21 + version: 2.9.1 typescript: specifier: 'catalog:' version: 6.0.2 typescript-eslint: specifier: 'catalog:' - version: 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + version: 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) vite: specifier: 'catalog:' version: 8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3) @@ -317,6 +317,9 @@ importers: cli: dependencies: + '@truenine/script-runtime': + specifier: workspace:* + version: link:../libraries/script-runtime json5: specifier: 'catalog:' version: 2.2.3 @@ -329,7 +332,7 @@ importers: devDependencies: '@truenine/eslint10-config': specifier: 'catalog:' - version: 2026.10326.11646(3147283cf2a5f1693493984073c80bb9) + version: 2026.10326.11646(8cca449280c02e5bb3a580f8f930062e) '@truenine/memory-sync-sdk': specifier: workspace:* version: link:../sdk @@ -410,7 +413,7 @@ importers: devDependencies: '@truenine/eslint10-config': specifier: 'catalog:' - version: 2026.10326.11646(3147283cf2a5f1693493984073c80bb9) + version: 2026.10326.11646(8cca449280c02e5bb3a580f8f930062e) '@types/node': specifier: 'catalog:' version: 25.5.0 @@ -452,13 +455,13 @@ importers: version: 4.2.2(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) '@tanstack/react-router': specifier: 'catalog:' - version: 1.168.8(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + version: 1.168.10(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@tanstack/router-generator': specifier: 'catalog:' - version: 1.166.22 + version: 1.166.24 '@tanstack/router-plugin': specifier: 'catalog:' - version: 1.167.9(@tanstack/react-router@1.168.8(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) + version: 1.167.12(@tanstack/react-router@1.168.10(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) '@tauri-apps/api': specifier: 'catalog:' version: 2.10.1 @@ -494,7 +497,7 @@ importers: version: 1.7.0(react@19.2.4) material-icon-theme: specifier: 'catalog:' - version: 5.32.0 + version: 5.33.1 monaco-editor: specifier: 'catalog:' version: 0.55.1 @@ -596,7 +599,7 @@ importers: version: 3.6.0(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0) '@truenine/eslint10-config': specifier: 'catalog:' - version: 2026.10326.11646(3147283cf2a5f1693493984073c80bb9) + version: 2026.10326.11646(8cca449280c02e5bb3a580f8f930062e) '@types/node': specifier: 'catalog:' version: 25.5.0 @@ -623,7 +626,7 @@ importers: dependencies: '@modelcontextprotocol/sdk': specifier: 'catalog:' - version: 1.28.0(zod@4.3.6) + version: 1.29.0(zod@4.3.6) '@truenine/memory-sync-sdk': specifier: workspace:* version: link:../sdk @@ -633,7 +636,7 @@ importers: devDependencies: '@truenine/eslint10-config': specifier: 'catalog:' - version: 2026.10326.11646(3147283cf2a5f1693493984073c80bb9) + version: 2026.10326.11646(8cca449280c02e5bb3a580f8f930062e) '@types/node': specifier: 'catalog:' version: 25.5.0 @@ -658,6 +661,15 @@ importers: sdk: dependencies: + '@truenine/logger': + specifier: workspace:* + version: link:../libraries/logger + '@truenine/md-compiler': + specifier: workspace:* + version: link:../libraries/md-compiler + '@truenine/script-runtime': + specifier: workspace:* + version: link:../libraries/script-runtime json5: specifier: 'catalog:' version: 2.2.3 @@ -671,15 +683,6 @@ importers: '@clack/prompts': specifier: 'catalog:' version: 1.1.0 - '@truenine/logger': - specifier: workspace:* - version: link:../libraries/logger - '@truenine/md-compiler': - specifier: workspace:* - version: link:../libraries/md-compiler - '@truenine/script-runtime': - specifier: workspace:* - version: link:../libraries/script-runtime '@types/fs-extra': specifier: 'catalog:' version: 11.0.4 @@ -1545,8 +1548,8 @@ packages: '@mermaid-js/parser@1.0.1': resolution: {integrity: sha512-opmV19kN1JsK0T6HhhokHpcVkqKpF+x2pPDKKM2ThHtZAB5F4PROopk0amuVYK5qMrIA4erzpNm8gmPNJgMDxQ==} - '@modelcontextprotocol/sdk@1.28.0': - resolution: {integrity: sha512-gmloF+i+flI8ouQK7MWW4mOwuMh4RePBuPFAEPC6+pdqyWOUMDOixb6qZ69owLJpz6XmyllCouc4t8YWO+E2Nw==} + '@modelcontextprotocol/sdk@1.29.0': + resolution: {integrity: sha512-zo37mZA9hJWpULgkRpowewez1y6ML5GsXJPY8FI0tBBCd77HEvza4jDqRKOXgHNn867PVGCyTdzqpz0izu5ZjQ==} engines: {node: '>=18'} peerDependencies: '@cfworker/json-schema': ^4.1.1 @@ -2601,8 +2604,8 @@ packages: resolution: {integrity: sha512-NaOGLRrddszbQj9upGat6HG/4TKvXLvu+osAIgfxPYA+eIvYKv8GKDJOrY2D3/U9MRnKfMWD7bU4jeD4xmqyIg==} engines: {node: '>=20.19'} - '@tanstack/react-router@1.168.8': - resolution: {integrity: sha512-t0S0QueXubBKmI9eLPcN/A1sLQgTu8/yHerjrvvsGeD12zMdw0uJPKwEKpStQF2OThQtw64cs34uUSYXBUTSNw==} + '@tanstack/react-router@1.168.10': + resolution: {integrity: sha512-/RmDlOwDkCug609KdPB3U+U1zmrtadJpvsmRg2zEn8TRCKRNri7dYZIjQZbNg8PgUiRL4T6njrZBV1ChzblNaA==} engines: {node: '>=20.19'} peerDependencies: react: '>=18.0.0 || >=19.0.0' @@ -2620,22 +2623,22 @@ packages: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - '@tanstack/router-core@1.168.7': - resolution: {integrity: sha512-z4UEdlzMrFaKBsG4OIxlZEm+wsYBtEp//fnX6kW18jhQpETNcM6u2SXNdX+bcIYp6AaR7ERS3SBENzjC/xxwQQ==} + '@tanstack/router-core@1.168.9': + resolution: {integrity: sha512-18oeEwEDyXOIuO1VBP9ACaK7tYHZUjynGDCoUh/5c/BNhia9vCJCp9O0LfhZXOorDc/PmLSgvmweFhVmIxF10g==} engines: {node: '>=20.19'} hasBin: true - '@tanstack/router-generator@1.166.22': - resolution: {integrity: sha512-wQ7H8/Q2rmSPuaxWnurJ3DATNnqWV2tajxri9TSiW4QHsG7cWPD34+goeIinKG+GajJyEdfVpz6w/gRJXfbAPw==} + '@tanstack/router-generator@1.166.24': + resolution: {integrity: sha512-vdaGKwuH+r+DPe6R1mjk+TDDmDH6NTG7QqwxHqGEvOH4aGf9sPjhmRKNJZqQr8cPIbfp6u5lXyZ1TeDcSNMVEA==} engines: {node: '>=20.19'} - '@tanstack/router-plugin@1.167.9': - resolution: {integrity: sha512-h/VV05FEHd4PVyc5Zy8B3trWLcdLt/Pmp+mfifmBKGRw+MUtvdQKbBHhmy4ouOf67s5zDJMc+n8R3xgU7bDwFA==} + '@tanstack/router-plugin@1.167.12': + resolution: {integrity: sha512-StEHcctCuFI5taSjO+lhR/yQ+EK63BdyYa+ne6FoNQPB3MMrOUrz2ZVnbqILRLkh2b+p2EfBKt65sgAKdKygPQ==} engines: {node: '>=20.19'} hasBin: true peerDependencies: '@rsbuild/core': '>=1.0.2' - '@tanstack/react-router': ^1.168.8 + '@tanstack/react-router': ^1.168.10 vite: '>=5.0.0 || >=6.0.0 || >=7.0.0' vite-plugin-solid: ^2.11.10 webpack: '>=5.92.0' @@ -2778,33 +2781,33 @@ packages: '@ts-morph/common@0.28.1': resolution: {integrity: sha512-W74iWf7ILp1ZKNYXY5qbddNaml7e9Sedv5lvU1V8lftlitkc9Pq1A+jlH23ltDgWYeZFFEqGCD1Ies9hqu3O+g==} - '@turbo/darwin-64@2.8.21': - resolution: {integrity: sha512-kfGoM0Iw8ZNZpbds+4IzOe0hjvHldqJwUPRAjXJi3KBxg/QOZL95N893SRoMtf2aJ+jJ3dk32yPkp8rvcIjP9g==} + '@turbo/darwin-64@2.9.1': + resolution: {integrity: sha512-d1zTcIf6VWT7cdfjhi0X36C2PRsUi2HdEwYzVgkLHmuuYtL+1Y1Zu3JdlouoB/NjG2vX3q4NnKLMNhDOEweoIg==} cpu: [x64] os: [darwin] - '@turbo/darwin-arm64@2.8.21': - resolution: {integrity: sha512-o9HEflxUEyr987x0cTUzZBhDOyL6u95JmdmlkH2VyxAw7zq2sdtM5e72y9ufv2N5SIoOBw1fVn9UES5VY5H6vQ==} + '@turbo/darwin-arm64@2.9.1': + resolution: {integrity: sha512-AwJ4mA++Kpem33Lcov093hS1LrgqbKxqq5FCReoqsA8ayEG6eAJAo8ItDd9qQTdBiXxZH8GHCspLAMIe1t3Xyw==} cpu: [arm64] os: [darwin] - '@turbo/linux-64@2.8.21': - resolution: {integrity: sha512-uTxlCcXWy5h1fSSymP8XSJ+AudzEHMDV3IDfKX7+DGB8kgJ+SLoTUAH7z4OFA7I/l2sznz0upPdbNNZs91YMag==} + '@turbo/linux-64@2.9.1': + resolution: {integrity: sha512-HT9SjKkjEw9uvlgly/qwCGEm4wOXOwQPSPS+wkg+/O1Qan3F1uU/0PFYzxl3m4lfuV3CP9wr2Dq5dPrUX+B9Ag==} cpu: [x64] os: [linux] - '@turbo/linux-arm64@2.8.21': - resolution: {integrity: sha512-cdHIcxNcihHHkCHp0Y4Zb60K4Qz+CK4xw1gb6s/t/9o4SMeMj+hTBCtoW6QpPnl9xPYmxuTou8Zw6+cylTnREg==} + '@turbo/linux-arm64@2.9.1': + resolution: {integrity: sha512-+4s5GZs3kjxc1KMhLBhoQy4UBkXjOhgidA9ipNllkA4JLivSqUCuOgU1Xbyp6vzYrsqHJ9vvwo/2mXgEtD6ZHg==} cpu: [arm64] os: [linux] - '@turbo/windows-64@2.8.21': - resolution: {integrity: sha512-/iBj4OzbqEY8CX+eaeKbBTMZv2CLXNrt0692F7HnK7LcyYwyDecaAiSET6ZzL4opT7sbwkKvzAC/fhqT3Quu1A==} + '@turbo/windows-64@2.9.1': + resolution: {integrity: sha512-ZO7GCyQd5HV564XWHc9KysjanFfM3DmnWquyEByu+hQMq42g9OMU/fYOCfHS6Xj2aXkIg2FHJeRV+iAck2YrbQ==} cpu: [x64] os: [win32] - '@turbo/windows-arm64@2.8.21': - resolution: {integrity: sha512-95tMA/ZbIidJFUUtkmqioQ1gf3n3I1YbRP3ZgVdWTVn2qVbkodcIdGXBKRHHrIbRsLRl99SiHi/L7IxhpZDagQ==} + '@turbo/windows-arm64@2.9.1': + resolution: {integrity: sha512-BjX2fdz38mBb/H94JXrD5cJ+mEq8NmsCbYdC42JzQebJ0X8EdNgyFoEhOydPGViOmaRmhhdZnPZKKn6wahSpcA==} cpu: [arm64] os: [win32] @@ -2981,13 +2984,13 @@ packages: '@types/use-sync-external-store@0.0.6': resolution: {integrity: sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==} - '@typescript-eslint/eslint-plugin@8.57.2': - resolution: {integrity: sha512-NZZgp0Fm2IkD+La5PR81sd+g+8oS6JwJje+aRWsDocxHkjyRw0J5L5ZTlN3LI1LlOcGL7ph3eaIUmTXMIjLk0w==} + '@typescript-eslint/eslint-plugin@8.58.0': + resolution: {integrity: sha512-RLkVSiNuUP1C2ROIWfqX+YcUfLaSnxGE/8M+Y57lopVwg9VTYYfhuz15Yf1IzCKgZj6/rIbYTmJCUSqr76r0Wg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.57.2 + '@typescript-eslint/parser': ^8.58.0 eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' '@typescript-eslint/parser@8.56.1': resolution: {integrity: sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==} @@ -2996,12 +2999,12 @@ packages: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/parser@8.57.2': - resolution: {integrity: sha512-30ScMRHIAD33JJQkgfGW1t8CURZtjc2JpTrq5n2HFhOefbAhb7ucc7xJwdWcrEtqUIYJ73Nybpsggii6GtAHjA==} + '@typescript-eslint/parser@8.58.0': + resolution: {integrity: sha512-rLoGZIf9afaRBYsPUMtvkDWykwXwUPL60HebR4JgTI8mxfFe2cQTu3AGitANp4b9B2QlVru6WzjgB2IzJKiCSA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' '@typescript-eslint/project-service@8.56.1': resolution: {integrity: sha512-TAdqQTzHNNvlVFfR+hu2PDJrURiwKsUvxFn1M0h95BB8ah5jejas08jUWG4dBA68jDMI988IvtfdAI53JzEHOQ==} @@ -3009,11 +3012,11 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.57.2': - resolution: {integrity: sha512-FuH0wipFywXRTHf+bTTjNyuNQQsQC3qh/dYzaM4I4W0jrCqjCVuUh99+xd9KamUfmCGPvbO8NDngo/vsnNVqgw==} + '@typescript-eslint/project-service@8.58.0': + resolution: {integrity: sha512-8Q/wBPWLQP1j16NxoPNIKpDZFMaxl7yWIoqXWYeWO+Bbd2mjgvoF0dxP2jKZg5+x49rgKdf7Ck473M8PC3V9lg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' '@typescript-eslint/rule-tester@8.56.1': resolution: {integrity: sha512-EWuV5Vq1EFYJEOVcILyWPO35PjnT0c6tv99PCpD12PgfZae5/Jo+F17hGjsEs2Moe+Dy1J7KIr8y037cK8+/rQ==} @@ -3025,8 +3028,8 @@ packages: resolution: {integrity: sha512-YAi4VDKcIZp0O4tz/haYKhmIDZFEUPOreKbfdAN3SzUDMcPhJ8QI99xQXqX+HoUVq8cs85eRKnD+rne2UAnj2w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/scope-manager@8.57.2': - resolution: {integrity: sha512-snZKH+W4WbWkrBqj4gUNRIGb/jipDW3qMqVJ4C9rzdFc+wLwruxk+2a5D+uoFcKPAqyqEnSb4l2ULuZf95eSkw==} + '@typescript-eslint/scope-manager@8.58.0': + resolution: {integrity: sha512-W1Lur1oF50FxSnNdGp3Vs6P+yBRSmZiw4IIjEeYxd8UQJwhUF0gDgDD/W/Tgmh73mxgEU3qX0Bzdl/NGuSPEpQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@typescript-eslint/tsconfig-utils@8.56.1': @@ -3035,25 +3038,25 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/tsconfig-utils@8.57.2': - resolution: {integrity: sha512-3Lm5DSM+DCowsUOJC+YqHHnKEfFh5CoGkj5Z31NQSNF4l5wdOwqGn99wmwN/LImhfY3KJnmordBq/4+VDe2eKw==} + '@typescript-eslint/tsconfig-utils@8.58.0': + resolution: {integrity: sha512-doNSZEVJsWEu4htiVC+PR6NpM+pa+a4ClH9INRWOWCUzMst/VA9c4gXq92F8GUD1rwhNvRLkgjfYtFXegXQF7A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' - '@typescript-eslint/type-utils@8.57.2': - resolution: {integrity: sha512-Co6ZCShm6kIbAM/s+oYVpKFfW7LBc6FXoPXjTRQ449PPNBY8U0KZXuevz5IFuuUj2H9ss40atTaf9dlGLzbWZg==} + '@typescript-eslint/type-utils@8.58.0': + resolution: {integrity: sha512-aGsCQImkDIqMyx1u4PrVlbi/krmDsQUs4zAcCV6M7yPcPev+RqVlndsJy9kJ8TLihW9TZ0kbDAzctpLn5o+lOg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' '@typescript-eslint/types@8.56.1': resolution: {integrity: sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/types@8.57.2': - resolution: {integrity: sha512-/iZM6FnM4tnx9csuTxspMW4BOSegshwX5oBDznJ7S4WggL7Vczz5d2W11ecc4vRrQMQHXRSxzrCsyG5EsPPTbA==} + '@typescript-eslint/types@8.58.0': + resolution: {integrity: sha512-O9CjxypDT89fbHxRfETNoAnHj/i6IpRK0CvbVN3qibxlLdo5p5hcLmUuCCrHMpxiWSwKyI8mCP7qRNYuOJ0Uww==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@typescript-eslint/typescript-estree@8.56.1': @@ -3062,11 +3065,11 @@ packages: peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/typescript-estree@8.57.2': - resolution: {integrity: sha512-2MKM+I6g8tJxfSmFKOnHv2t8Sk3T6rF20A1Puk0svLK+uVapDZB/4pfAeB7nE83uAZrU6OxW+HmOd5wHVdXwXA==} + '@typescript-eslint/typescript-estree@8.58.0': + resolution: {integrity: sha512-7vv5UWbHqew/dvs+D3e1RvLv1v2eeZ9txRHPnEEBUgSNLx5ghdzjHa0sgLWYVKssH+lYmV0JaWdoubo0ncGYLA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' '@typescript-eslint/utils@8.56.1': resolution: {integrity: sha512-HPAVNIME3tABJ61siYlHzSWCGtOoeP2RTIaHXFMPqjrQKCGB9OgUVdiNgH7TJS2JNIQ5qQ4RsAUDuGaGme/KOA==} @@ -3075,19 +3078,19 @@ packages: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/utils@8.57.2': - resolution: {integrity: sha512-krRIbvPK1ju1WBKIefiX+bngPs+odIQUtR7kymzPfo1POVw3jlF+nLkmexdSSd4UCbDcQn+wMBATOOmpBbqgKg==} + '@typescript-eslint/utils@8.58.0': + resolution: {integrity: sha512-RfeSqcFeHMHlAWzt4TBjWOAtoW9lnsAGiP3GbaX9uVgTYYrMbVnGONEfUCiSss+xMHFl+eHZiipmA8WkQ7FuNA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' '@typescript-eslint/visitor-keys@8.56.1': resolution: {integrity: sha512-KiROIzYdEV85YygXw6BI/Dx4fnBlFQu6Mq4QE4MOH9fFnhohw6wX/OAvDY2/C+ut0I3RSPKenvZJIVYqJNkhEw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/visitor-keys@8.57.2': - resolution: {integrity: sha512-zhahknjobV2FiD6Ee9iLbS7OV9zi10rG26odsQdfBO/hjSzUQbkIYgda+iNKK1zNiW2ey+Lf8MU5btN17V3dUw==} + '@typescript-eslint/visitor-keys@8.58.0': + resolution: {integrity: sha512-XJ9UD9+bbDo4a4epraTwG3TsNPeiB9aShrUneAVXy8q4LuwowN+qu89/6ByLMINqvIMeI9H9hOHQtg/ijrYXzQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@typescript/vfs@1.6.4': @@ -3767,8 +3770,8 @@ packages: ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} - electron-to-chromium@1.5.328: - resolution: {integrity: sha512-QNQ5l45DzYytThO21403XN3FvK0hOkWDG8viNf6jqS42msJ8I4tGDSpBCgvDRRPnkffafiwAym2X2eHeGD2V0w==} + electron-to-chromium@1.5.329: + resolution: {integrity: sha512-/4t+AS1l4S3ZC0Ja7PHFIWeBIxGA3QGqV8/yKsP36v7NcyUCl+bIcmw6s5zVuMIECWwBrAK/6QLzTmbJChBboQ==} emnapi@1.9.1: resolution: {integrity: sha512-s4RbfzgbYg9cWBZXJT6LazImJQ5p+F+LyTsCWQJXbGVdPmtCtdlwqd0Oiv3O51KyYV/Hq58xszaQ/l153tK6Uw==} @@ -4141,8 +4144,8 @@ packages: resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} engines: {node: '>=12.0.0'} - express-rate-limit@8.3.1: - resolution: {integrity: sha512-D1dKN+cmyPWuvB+G2SREQDzPY1agpBIcTa9sJxOPMCNeH3gwzhqJRDWCXW3gg0y//+LQ/8j52JbMROWyrKdMdw==} + express-rate-limit@8.3.2: + resolution: {integrity: sha512-77VmFeJkO0/rvimEDuUC5H30oqUC4EyOhyGccfqoLebB0oiEYfM7nwPrsDsBL1gsTpwfzX8SFy2MT3TDyRq+bg==} engines: {node: '>= 16'} peerDependencies: express: '>= 4.11' @@ -4517,8 +4520,8 @@ packages: resolution: {integrity: sha512-jv+8jaWCl0g2lSBkNSVXdzfBA0npK1HGC2KtWM9FumFRoGS94g3NbCCLVnCYHLjp4GrW2KZeeSTMo5ddtznmGw==} engines: {node: '>=18'} - isbot@5.1.36: - resolution: {integrity: sha512-C/ZtXyJqDPZ7G7JPr06ApWyYoHjYexQbS6hPYD4WYCzpv2Qes6Z+CCEfTX4Owzf+1EJ933PoI2p+B9v7wpGZBQ==} + isbot@5.1.37: + resolution: {integrity: sha512-5bcicX81xf6NlTEV8rWdg7Pk01LFizDetuYGHx6d/f6y3lR2/oo8IfxjzJqn1UdDEyCcwT9e7NRloj8DwCYujQ==} engines: {node: '>=18'} isexe@2.0.0: @@ -4754,8 +4757,8 @@ packages: engines: {node: '>= 20'} hasBin: true - material-icon-theme@5.32.0: - resolution: {integrity: sha512-SxJxCcnk6cJIbd+AxmoeghXJ24joXGmUzjiGci16sX4mXZdXprGEzM6ZZ0VHGAofxNlMqznEbExINwFLsxf8eQ==} + material-icon-theme@5.33.1: + resolution: {integrity: sha512-imiILyUW/Y6YvxbobfReC4PpzjPfpMTUiiH9gN5sloQ3dJAhfJRE40l2q8pMT8+V4xRHiIZ5vaZtJw4D03qduQ==} engines: {vscode: ^1.55.0} math-intrinsics@1.1.0: @@ -4972,8 +4975,8 @@ packages: resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} engines: {node: '>=12'} - minimatch@10.2.4: - resolution: {integrity: sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==} + minimatch@10.2.5: + resolution: {integrity: sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==} engines: {node: 18 || 20 || >=22} mj-context-menu@0.6.1: @@ -5186,8 +5189,8 @@ packages: resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} engines: {node: '>=12'} - path-to-regexp@8.4.0: - resolution: {integrity: sha512-PuseHIvAnz3bjrM2rGJtSgo1zjgxapTLZ7x2pjhzWwlp4SJQgK3f3iZIQwkpEnBaKz6seKBADpM4B4ySkuYypg==} + path-to-regexp@8.4.1: + resolution: {integrity: sha512-fvU78fIjZ+SBM9YwCknCvKOUKkLVqtWDVctl0s7xIqfmfb38t2TT4ZU2gHm+Z8xGwgW+QWEU3oQSAzIbo89Ggw==} pathe@2.0.3: resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} @@ -5823,8 +5826,8 @@ packages: engines: {node: '>=18.0.0'} hasBin: true - turbo@2.8.21: - resolution: {integrity: sha512-FlJ8OD5Qcp0jTAM7E4a/RhUzRNds2GzKlyxHKA6N247VLy628rrxAGlMpIXSz6VB430+TiQDJ/SMl6PL1lu6wQ==} + turbo@2.9.1: + resolution: {integrity: sha512-TO9du8MwLTAKoXcGezekh9cPJabJUb0+8KxtpMR6kXdRASrmJ8qXf2GkVbCREgzbMQakzfNcux9cZtxheDY4RQ==} hasBin: true tw-animate-css@1.4.0: @@ -5849,12 +5852,12 @@ packages: resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==} engines: {node: '>= 0.6'} - typescript-eslint@8.57.2: - resolution: {integrity: sha512-VEPQ0iPgWO/sBaZOU1xo4nuNdODVOajPnTIbog2GKYr31nIlZ0fWPoCQgGfF3ETyBl1vn63F/p50Um9Z4J8O8A==} + typescript-eslint@8.58.0: + resolution: {integrity: sha512-e2TQzKfaI85fO+F3QywtX+tCTsu/D3WW5LVU6nz8hTFKFZ8yBJ6mSYRpXqdR3mFjPWmO0eWsTa5f+UpAOe/FMA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 - typescript: '>=4.8.4 <6.0.0' + typescript: '>=4.8.4 <6.1.0' typescript@6.0.2: resolution: {integrity: sha512-bGdAIrZ0wiGDo5l8c++HWtbaNCWTS4UTv7RaTH/ThVIgjkveJt83m74bBHMJkuCbslY8ixgLBVZJIOiQlQTjfQ==} @@ -6166,7 +6169,7 @@ packages: snapshots: - '@antfu/eslint-config@7.7.3(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@typescript-eslint/typescript-estree@8.57.2(typescript@6.0.2))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@unocss/eslint-plugin@66.6.7(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@vue/compiler-sfc@3.5.26)(eslint-plugin-format@2.0.1(eslint@10.1.0(jiti@2.6.1)))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)))': + '@antfu/eslint-config@7.7.3(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@typescript-eslint/typescript-estree@8.58.0(typescript@6.0.2))(@typescript-eslint/utils@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@unocss/eslint-plugin@66.6.7(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@vue/compiler-sfc@3.5.26)(eslint-plugin-format@2.0.1(eslint@10.1.0(jiti@2.6.1)))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)))': dependencies: '@antfu/install-pkg': 1.1.0 '@clack/prompts': 1.1.0 @@ -6174,9 +6177,9 @@ snapshots: '@eslint-community/eslint-plugin-eslint-comments': 4.7.1(eslint@10.1.0(jiti@2.6.1)) '@eslint/markdown': 7.5.1 '@stylistic/eslint-plugin': 5.10.0(eslint@10.1.0(jiti@2.6.1)) - '@typescript-eslint/eslint-plugin': 8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) - '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) - '@vitest/eslint-plugin': 1.6.13(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))) + '@typescript-eslint/eslint-plugin': 8.58.0(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/parser': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@vitest/eslint-plugin': 1.6.13(@typescript-eslint/eslint-plugin@8.58.0(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))) ansis: 4.2.0 cac: 7.0.0 eslint: 10.1.0(jiti@2.6.1) @@ -6184,7 +6187,7 @@ snapshots: eslint-flat-config-utils: 3.0.2 eslint-merge-processors: 2.0.0(eslint@10.1.0(jiti@2.6.1)) eslint-plugin-antfu: 3.2.2(eslint@10.1.0(jiti@2.6.1)) - eslint-plugin-command: 3.5.2(@typescript-eslint/rule-tester@8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@typescript-eslint/typescript-estree@8.57.2(typescript@6.0.2))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1)) + eslint-plugin-command: 3.5.2(@typescript-eslint/rule-tester@8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@typescript-eslint/typescript-estree@8.58.0(typescript@6.0.2))(@typescript-eslint/utils@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1)) eslint-plugin-import-lite: 0.5.2(eslint@10.1.0(jiti@2.6.1)) eslint-plugin-jsdoc: 62.8.1(eslint@10.1.0(jiti@2.6.1)) eslint-plugin-jsonc: 3.1.2(eslint@10.1.0(jiti@2.6.1)) @@ -6195,8 +6198,8 @@ snapshots: eslint-plugin-regexp: 3.1.0(eslint@10.1.0(jiti@2.6.1)) eslint-plugin-toml: 1.3.1(eslint@10.1.0(jiti@2.6.1)) eslint-plugin-unicorn: 63.0.0(eslint@10.1.0(jiti@2.6.1)) - eslint-plugin-unused-imports: 4.4.1(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1)) - eslint-plugin-vue: 10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))) + eslint-plugin-unused-imports: 4.4.1(@typescript-eslint/eslint-plugin@8.58.0(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1)) + eslint-plugin-vue: 10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))) eslint-plugin-yml: 3.3.1(eslint@10.1.0(jiti@2.6.1)) eslint-processor-vue-blocks: 2.0.0(@vue/compiler-sfc@3.5.26)(eslint@10.1.0(jiti@2.6.1)) globals: 17.4.0 @@ -6420,7 +6423,7 @@ snapshots: '@es-joy/jsdoccomment@0.84.0': dependencies: '@types/estree': 1.0.8 - '@typescript-eslint/types': 8.57.2 + '@typescript-eslint/types': 8.58.0 comment-parser: 1.4.5 esquery: 1.7.0 jsdoc-type-pratt-parser: 7.1.1 @@ -6528,7 +6531,7 @@ snapshots: dependencies: '@eslint/object-schema': 3.0.3 debug: 4.4.3 - minimatch: 10.2.4 + minimatch: 10.2.5 transitivePeerDependencies: - supports-color @@ -6905,7 +6908,7 @@ snapshots: dependencies: langium: 4.2.1 - '@modelcontextprotocol/sdk@1.28.0(zod@4.3.6)': + '@modelcontextprotocol/sdk@1.29.0(zod@4.3.6)': dependencies: '@hono/node-server': 1.19.12(hono@4.12.9) ajv: 8.18.0 @@ -6916,7 +6919,7 @@ snapshots: eventsource: 3.0.7 eventsource-parser: 3.0.6 express: 5.2.1 - express-rate-limit: 8.3.1(express@5.2.1) + express-rate-limit: 8.3.2(express@5.2.1) hono: 4.12.9 jose: 6.2.2 json-schema-typed: 8.0.2 @@ -7623,7 +7626,7 @@ snapshots: '@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1))': dependencies: '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1)) - '@typescript-eslint/types': 8.57.2 + '@typescript-eslint/types': 8.58.0 eslint: 10.1.0(jiti@2.6.1) eslint-visitor-keys: 4.2.1 espree: 10.4.0 @@ -7708,12 +7711,12 @@ snapshots: '@tanstack/history@1.161.6': {} - '@tanstack/react-router@1.168.8(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + '@tanstack/react-router@1.168.10(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: '@tanstack/history': 1.161.6 '@tanstack/react-store': 0.9.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4) - '@tanstack/router-core': 1.168.7 - isbot: 5.1.36 + '@tanstack/router-core': 1.168.9 + isbot: 5.1.37 react: 19.2.4 react-dom: 19.2.4(react@19.2.4) @@ -7730,16 +7733,16 @@ snapshots: react: 19.2.4 react-dom: 19.2.4(react@19.2.4) - '@tanstack/router-core@1.168.7': + '@tanstack/router-core@1.168.9': dependencies: '@tanstack/history': 1.161.6 cookie-es: 2.0.0 seroval: 1.5.1 seroval-plugins: 1.5.1(seroval@1.5.1) - '@tanstack/router-generator@1.166.22': + '@tanstack/router-generator@1.166.24': dependencies: - '@tanstack/router-core': 1.168.7 + '@tanstack/router-core': 1.168.9 '@tanstack/router-utils': 1.161.6 '@tanstack/virtual-file-routes': 1.161.7 prettier: 3.8.1 @@ -7750,7 +7753,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@tanstack/router-plugin@1.167.9(@tanstack/react-router@1.168.8(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))': + '@tanstack/router-plugin@1.167.12(@tanstack/react-router@1.168.10(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))': dependencies: '@babel/core': 7.29.0 '@babel/plugin-syntax-jsx': 7.28.6(@babel/core@7.29.0) @@ -7758,15 +7761,15 @@ snapshots: '@babel/template': 7.28.6 '@babel/traverse': 7.29.0 '@babel/types': 7.29.0 - '@tanstack/router-core': 1.168.7 - '@tanstack/router-generator': 1.166.22 + '@tanstack/router-core': 1.168.9 + '@tanstack/router-generator': 1.166.24 '@tanstack/router-utils': 1.161.6 '@tanstack/virtual-file-routes': 1.161.7 chokidar: 3.6.0 unplugin: 2.3.11 zod: 3.25.76 optionalDependencies: - '@tanstack/react-router': 1.168.8(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@tanstack/react-router': 1.168.10(react-dom@19.2.4(react@19.2.4))(react@19.2.4) vite: 8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3) transitivePeerDependencies: - supports-color @@ -7859,44 +7862,44 @@ snapshots: npm-to-yarn: 3.0.1 unist-util-visit: 5.1.0 - '@truenine/eslint10-config@2026.10326.11646(3147283cf2a5f1693493984073c80bb9)': + '@truenine/eslint10-config@2026.10326.11646(8cca449280c02e5bb3a580f8f930062e)': dependencies: - '@antfu/eslint-config': 7.7.3(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@typescript-eslint/typescript-estree@8.57.2(typescript@6.0.2))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@unocss/eslint-plugin@66.6.7(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@vue/compiler-sfc@3.5.26)(eslint-plugin-format@2.0.1(eslint@10.1.0(jiti@2.6.1)))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))) + '@antfu/eslint-config': 7.7.3(@next/eslint-plugin-next@16.2.1)(@typescript-eslint/rule-tester@8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@typescript-eslint/typescript-estree@8.58.0(typescript@6.0.2))(@typescript-eslint/utils@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@unocss/eslint-plugin@66.6.7(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@vue/compiler-sfc@3.5.26)(eslint-plugin-format@2.0.1(eslint@10.1.0(jiti@2.6.1)))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3))) '@eslint/js': 10.0.1(eslint@10.1.0(jiti@2.6.1)) '@next/eslint-plugin-next': 16.2.1 '@unocss/eslint-config': 66.6.7(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) '@vue/eslint-config-prettier': 10.2.0(eslint@10.1.0(jiti@2.6.1))(prettier@3.8.1) - '@vue/eslint-config-typescript': 14.7.0(eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@vue/eslint-config-typescript': 14.7.0(eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) eslint: 10.1.0(jiti@2.6.1) eslint-plugin-format: 2.0.1(eslint@10.1.0(jiti@2.6.1)) eslint-plugin-prettier: 5.5.5(eslint-config-prettier@10.1.8(eslint@10.1.0(jiti@2.6.1)))(eslint@10.1.0(jiti@2.6.1))(prettier@3.8.1) - eslint-plugin-vue: 10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))) + eslint-plugin-vue: 10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))) prettier: 3.8.1 typescript: 6.0.2 - typescript-eslint: 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + typescript-eslint: 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) '@ts-morph/common@0.28.1': dependencies: - minimatch: 10.2.4 + minimatch: 10.2.5 path-browserify: 1.0.1 tinyglobby: 0.2.15 - '@turbo/darwin-64@2.8.21': + '@turbo/darwin-64@2.9.1': optional: true - '@turbo/darwin-arm64@2.8.21': + '@turbo/darwin-arm64@2.9.1': optional: true - '@turbo/linux-64@2.8.21': + '@turbo/linux-64@2.9.1': optional: true - '@turbo/linux-arm64@2.8.21': + '@turbo/linux-arm64@2.9.1': optional: true - '@turbo/windows-64@2.8.21': + '@turbo/windows-64@2.9.1': optional: true - '@turbo/windows-arm64@2.8.21': + '@turbo/windows-arm64@2.9.1': optional: true '@tybys/wasm-util@0.10.1': @@ -8096,14 +8099,14 @@ snapshots: '@types/use-sync-external-store@0.0.6': {} - '@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': + '@typescript-eslint/eslint-plugin@8.58.0(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': dependencies: '@eslint-community/regexpp': 4.12.2 - '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) - '@typescript-eslint/scope-manager': 8.57.2 - '@typescript-eslint/type-utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) - '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) - '@typescript-eslint/visitor-keys': 8.57.2 + '@typescript-eslint/parser': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/scope-manager': 8.58.0 + '@typescript-eslint/type-utils': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/utils': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/visitor-keys': 8.58.0 eslint: 10.1.0(jiti@2.6.1) ignore: 7.0.5 natural-compare: 1.4.0 @@ -8124,12 +8127,12 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': + '@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': dependencies: - '@typescript-eslint/scope-manager': 8.57.2 - '@typescript-eslint/types': 8.57.2 - '@typescript-eslint/typescript-estree': 8.57.2(typescript@6.0.2) - '@typescript-eslint/visitor-keys': 8.57.2 + '@typescript-eslint/scope-manager': 8.58.0 + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/typescript-estree': 8.58.0(typescript@6.0.2) + '@typescript-eslint/visitor-keys': 8.58.0 debug: 4.4.3 eslint: 10.1.0(jiti@2.6.1) typescript: 6.0.2 @@ -8138,17 +8141,17 @@ snapshots: '@typescript-eslint/project-service@8.56.1(typescript@6.0.2)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.57.2(typescript@6.0.2) - '@typescript-eslint/types': 8.57.2 + '@typescript-eslint/tsconfig-utils': 8.56.1(typescript@6.0.2) + '@typescript-eslint/types': 8.56.1 debug: 4.4.3 typescript: 6.0.2 transitivePeerDependencies: - supports-color - '@typescript-eslint/project-service@8.57.2(typescript@6.0.2)': + '@typescript-eslint/project-service@8.58.0(typescript@6.0.2)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.57.2(typescript@6.0.2) - '@typescript-eslint/types': 8.57.2 + '@typescript-eslint/tsconfig-utils': 8.58.0(typescript@6.0.2) + '@typescript-eslint/types': 8.58.0 debug: 4.4.3 typescript: 6.0.2 transitivePeerDependencies: @@ -8173,24 +8176,24 @@ snapshots: '@typescript-eslint/types': 8.56.1 '@typescript-eslint/visitor-keys': 8.56.1 - '@typescript-eslint/scope-manager@8.57.2': + '@typescript-eslint/scope-manager@8.58.0': dependencies: - '@typescript-eslint/types': 8.57.2 - '@typescript-eslint/visitor-keys': 8.57.2 + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/visitor-keys': 8.58.0 '@typescript-eslint/tsconfig-utils@8.56.1(typescript@6.0.2)': dependencies: typescript: 6.0.2 - '@typescript-eslint/tsconfig-utils@8.57.2(typescript@6.0.2)': + '@typescript-eslint/tsconfig-utils@8.58.0(typescript@6.0.2)': dependencies: typescript: 6.0.2 - '@typescript-eslint/type-utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': + '@typescript-eslint/type-utils@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': dependencies: - '@typescript-eslint/types': 8.57.2 - '@typescript-eslint/typescript-estree': 8.57.2(typescript@6.0.2) - '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/typescript-estree': 8.58.0(typescript@6.0.2) + '@typescript-eslint/utils': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) debug: 4.4.3 eslint: 10.1.0(jiti@2.6.1) ts-api-utils: 2.5.0(typescript@6.0.2) @@ -8200,7 +8203,7 @@ snapshots: '@typescript-eslint/types@8.56.1': {} - '@typescript-eslint/types@8.57.2': {} + '@typescript-eslint/types@8.58.0': {} '@typescript-eslint/typescript-estree@8.56.1(typescript@6.0.2)': dependencies: @@ -8209,7 +8212,7 @@ snapshots: '@typescript-eslint/types': 8.56.1 '@typescript-eslint/visitor-keys': 8.56.1 debug: 4.4.3 - minimatch: 10.2.4 + minimatch: 10.2.5 semver: 7.7.4 tinyglobby: 0.2.15 ts-api-utils: 2.5.0(typescript@6.0.2) @@ -8217,14 +8220,14 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/typescript-estree@8.57.2(typescript@6.0.2)': + '@typescript-eslint/typescript-estree@8.58.0(typescript@6.0.2)': dependencies: - '@typescript-eslint/project-service': 8.57.2(typescript@6.0.2) - '@typescript-eslint/tsconfig-utils': 8.57.2(typescript@6.0.2) - '@typescript-eslint/types': 8.57.2 - '@typescript-eslint/visitor-keys': 8.57.2 + '@typescript-eslint/project-service': 8.58.0(typescript@6.0.2) + '@typescript-eslint/tsconfig-utils': 8.58.0(typescript@6.0.2) + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/visitor-keys': 8.58.0 debug: 4.4.3 - minimatch: 10.2.4 + minimatch: 10.2.5 semver: 7.7.4 tinyglobby: 0.2.15 ts-api-utils: 2.5.0(typescript@6.0.2) @@ -8243,12 +8246,12 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': + '@typescript-eslint/utils@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': dependencies: '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1)) - '@typescript-eslint/scope-manager': 8.57.2 - '@typescript-eslint/types': 8.57.2 - '@typescript-eslint/typescript-estree': 8.57.2(typescript@6.0.2) + '@typescript-eslint/scope-manager': 8.58.0 + '@typescript-eslint/types': 8.58.0 + '@typescript-eslint/typescript-estree': 8.58.0(typescript@6.0.2) eslint: 10.1.0(jiti@2.6.1) typescript: 6.0.2 transitivePeerDependencies: @@ -8259,9 +8262,9 @@ snapshots: '@typescript-eslint/types': 8.56.1 eslint-visitor-keys: 5.0.1 - '@typescript-eslint/visitor-keys@8.57.2': + '@typescript-eslint/visitor-keys@8.58.0': dependencies: - '@typescript-eslint/types': 8.57.2 + '@typescript-eslint/types': 8.58.0 eslint-visitor-keys: 5.0.1 '@typescript/vfs@1.6.4(typescript@6.0.2)': @@ -8292,7 +8295,7 @@ snapshots: '@unocss/eslint-plugin@66.6.7(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': dependencies: - '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/utils': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) '@unocss/config': 66.6.7 '@unocss/core': 66.6.7 '@unocss/rule-utils': 66.6.7 @@ -8332,13 +8335,13 @@ snapshots: tinyrainbow: 3.1.0 vitest: 4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) - '@vitest/eslint-plugin@1.6.13(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)))': + '@vitest/eslint-plugin@1.6.13(@typescript-eslint/eslint-plugin@8.58.0(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)(vitest@4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)))': dependencies: - '@typescript-eslint/scope-manager': 8.57.2 - '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/scope-manager': 8.58.0 + '@typescript-eslint/utils': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) eslint: 10.1.0(jiti@2.6.1) optionalDependencies: - '@typescript-eslint/eslint-plugin': 8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/eslint-plugin': 8.58.0(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) typescript: 6.0.2 vitest: 4.1.2(@types/node@25.5.0)(vite@8.0.3(@emnapi/core@1.9.1)(@emnapi/runtime@1.9.1)(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(tsx@4.21.0)(yaml@2.8.3)) transitivePeerDependencies: @@ -8424,13 +8427,13 @@ snapshots: transitivePeerDependencies: - '@types/eslint' - '@vue/eslint-config-typescript@14.7.0(eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': + '@vue/eslint-config-typescript@14.7.0(eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2)': dependencies: - '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/utils': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) eslint: 10.1.0(jiti@2.6.1) - eslint-plugin-vue: 10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))) + eslint-plugin-vue: 10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))) fast-glob: 3.3.3 - typescript-eslint: 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + typescript-eslint: 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) vue-eslint-parser: 10.4.0(eslint@10.1.0(jiti@2.6.1)) optionalDependencies: typescript: 6.0.2 @@ -8561,7 +8564,7 @@ snapshots: dependencies: baseline-browser-mapping: 2.10.12 caniuse-lite: 1.0.30001782 - electron-to-chromium: 1.5.328 + electron-to-chromium: 1.5.329 node-releases: 2.0.36 update-browserslist-db: 1.2.3(browserslist@4.28.1) @@ -8961,7 +8964,7 @@ snapshots: ee-first@1.1.1: {} - electron-to-chromium@1.5.328: {} + electron-to-chromium@1.5.329: {} emnapi@1.9.1: {} @@ -9083,12 +9086,12 @@ snapshots: dependencies: eslint: 10.1.0(jiti@2.6.1) - eslint-plugin-command@3.5.2(@typescript-eslint/rule-tester@8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@typescript-eslint/typescript-estree@8.57.2(typescript@6.0.2))(@typescript-eslint/utils@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1)): + eslint-plugin-command@3.5.2(@typescript-eslint/rule-tester@8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(@typescript-eslint/typescript-estree@8.58.0(typescript@6.0.2))(@typescript-eslint/utils@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1)): dependencies: '@es-joy/jsdoccomment': 0.84.0 '@typescript-eslint/rule-tester': 8.56.1(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) - '@typescript-eslint/typescript-estree': 8.57.2(typescript@6.0.2) - '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/typescript-estree': 8.58.0(typescript@6.0.2) + '@typescript-eslint/utils': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) eslint: 10.1.0(jiti@2.6.1) eslint-plugin-depend@1.5.0(eslint@10.1.0(jiti@2.6.1)): @@ -9176,7 +9179,7 @@ snapshots: eslint-plugin-perfectionist@5.7.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2): dependencies: - '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/utils': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) eslint: 10.1.0(jiti@2.6.1) natural-orderby: 5.0.0 transitivePeerDependencies: @@ -9245,13 +9248,13 @@ snapshots: semver: 7.7.4 strip-indent: 4.1.1 - eslint-plugin-unused-imports@4.4.1(@typescript-eslint/eslint-plugin@8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1)): + eslint-plugin-unused-imports@4.4.1(@typescript-eslint/eslint-plugin@8.58.0(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1)): dependencies: eslint: 10.1.0(jiti@2.6.1) optionalDependencies: - '@typescript-eslint/eslint-plugin': 8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/eslint-plugin': 8.58.0(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) - eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))): + eslint-plugin-vue@10.8.0(@stylistic/eslint-plugin@5.10.0(eslint@10.1.0(jiti@2.6.1)))(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(vue-eslint-parser@10.4.0(eslint@10.1.0(jiti@2.6.1))): dependencies: '@eslint-community/eslint-utils': 4.9.1(eslint@10.1.0(jiti@2.6.1)) eslint: 10.1.0(jiti@2.6.1) @@ -9263,7 +9266,7 @@ snapshots: xml-name-validator: 4.0.0 optionalDependencies: '@stylistic/eslint-plugin': 5.10.0(eslint@10.1.0(jiti@2.6.1)) - '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/parser': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) eslint-plugin-yml@3.3.1(eslint@10.1.0(jiti@2.6.1)): dependencies: @@ -9326,7 +9329,7 @@ snapshots: imurmurhash: 0.1.4 is-glob: 4.0.3 json-stable-stringify-without-jsonify: 1.0.1 - minimatch: 10.2.4 + minimatch: 10.2.5 natural-compare: 1.4.0 optionator: 0.9.4 optionalDependencies: @@ -9429,7 +9432,7 @@ snapshots: expect-type@1.3.0: {} - express-rate-limit@8.3.1(express@5.2.1): + express-rate-limit@8.3.2(express@5.2.1): dependencies: express: 5.2.1 ip-address: 10.1.0 @@ -9881,7 +9884,7 @@ snapshots: dependencies: system-architecture: 0.1.0 - isbot@5.1.36: {} + isbot@5.1.37: {} isexe@2.0.0: {} @@ -10070,7 +10073,7 @@ snapshots: marked@16.4.2: {} - material-icon-theme@5.32.0: + material-icon-theme@5.33.1: dependencies: chroma-js: 3.2.0 events: 3.3.0 @@ -10602,7 +10605,7 @@ snapshots: mimic-fn@4.0.0: {} - minimatch@10.2.4: + minimatch@10.2.5: dependencies: brace-expansion: 5.0.5 @@ -10887,7 +10890,7 @@ snapshots: path-key@4.0.0: {} - path-to-regexp@8.4.0: {} + path-to-regexp@8.4.1: {} pathe@2.0.3: {} @@ -11314,7 +11317,7 @@ snapshots: depd: 2.0.0 is-promise: 4.0.0 parseurl: 1.3.3 - path-to-regexp: 8.4.0 + path-to-regexp: 8.4.1 transitivePeerDependencies: - supports-color @@ -11640,14 +11643,14 @@ snapshots: optionalDependencies: fsevents: 2.3.3 - turbo@2.8.21: + turbo@2.9.1: optionalDependencies: - '@turbo/darwin-64': 2.8.21 - '@turbo/darwin-arm64': 2.8.21 - '@turbo/linux-64': 2.8.21 - '@turbo/linux-arm64': 2.8.21 - '@turbo/windows-64': 2.8.21 - '@turbo/windows-arm64': 2.8.21 + '@turbo/darwin-64': 2.9.1 + '@turbo/darwin-arm64': 2.9.1 + '@turbo/linux-64': 2.9.1 + '@turbo/linux-arm64': 2.9.1 + '@turbo/windows-64': 2.9.1 + '@turbo/windows-arm64': 2.9.1 tw-animate-css@1.4.0: {} @@ -11673,12 +11676,12 @@ snapshots: media-typer: 1.1.0 mime-types: 3.0.2 - typescript-eslint@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2): + typescript-eslint@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2): dependencies: - '@typescript-eslint/eslint-plugin': 8.57.2(@typescript-eslint/parser@8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) - '@typescript-eslint/parser': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) - '@typescript-eslint/typescript-estree': 8.57.2(typescript@6.0.2) - '@typescript-eslint/utils': 8.57.2(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/eslint-plugin': 8.58.0(@typescript-eslint/parser@8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/parser': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) + '@typescript-eslint/typescript-estree': 8.58.0(typescript@6.0.2) + '@typescript-eslint/utils': 8.58.0(eslint@10.1.0(jiti@2.6.1))(typescript@6.0.2) eslint: 10.1.0(jiti@2.6.1) typescript: 6.0.2 transitivePeerDependencies: diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 4e0bcfbe..23f79ba5 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -14,15 +14,15 @@ catalog: '@clack/prompts': ^1.1.0 '@eslint/js': ^10.0.1 '@mdx-js/react': ^3.1.1 - '@modelcontextprotocol/sdk': ^1.28.0 + '@modelcontextprotocol/sdk': ^1.29.0 '@monaco-editor/react': ^4.7.0 '@napi-rs/cli': ^3.6.0 '@next/eslint-plugin-next': ^16.2.1 '@next/mdx': ^16.2.0 '@tailwindcss/vite': ^4.2.2 - '@tanstack/react-router': ^1.168.8 - '@tanstack/router-generator': ^1.166.22 - '@tanstack/router-plugin': ^1.167.9 + '@tanstack/react-router': ^1.168.10 + '@tanstack/router-generator': ^1.166.24 + '@tanstack/router-plugin': ^1.167.12 '@tauri-apps/api': ^2.10.1 '@tauri-apps/cli': ^2.10.1 '@tauri-apps/plugin-shell': ^2.3.5 @@ -55,7 +55,7 @@ catalog: json5: ^2.2.3 lightningcss: ^1.32.0 lucide-react: ^1.7.0 - material-icon-theme: ^5.32.0 + material-icon-theme: ^5.33.1 mdast-util-mdx: ^3.0.0 mermaid: ^11.13.0 monaco-editor: ^0.55.1 @@ -79,10 +79,10 @@ catalog: tailwindcss: ^4.2.2 tsdown: ^0.21.7 tsx: ^4.21.0 - turbo: ^2.8.21 + turbo: ^2.9.1 tw-animate-css: ^1.4.0 typescript: 6.0.2 - typescript-eslint: ^8.57.2 + typescript-eslint: ^8.58.0 unified: ^11.0.5 vite: ^8.0.3 vitest: ^4.1.2 From 5eef91e5dc346474b4f6de5f6a91fda99a9c4928 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Tue, 31 Mar 2026 16:00:41 +0800 Subject: [PATCH 18/27] feat(release): enable npm package installation and add CLI platform package shim generation --- .github/workflows/release-cli.yml | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release-cli.yml b/.github/workflows/release-cli.yml index 5c262982..413ca103 100644 --- a/.github/workflows/release-cli.yml +++ b/.github/workflows/release-cli.yml @@ -239,7 +239,7 @@ jobs: - uses: actions/checkout@v6 - uses: ./.github/actions/setup-node-pnpm with: - install: "false" + install: "true" - name: Setup npm registry uses: actions/setup-node@v6 with: @@ -299,6 +299,16 @@ jobs: echo "Copying from ${artifact_dir} to ${target_dir}" cp "${artifact_dir}"*.node "$target_dir/" || { echo "ERROR: no .node files found in ${artifact_dir}"; exit 1; } done + - name: Generate CLI platform package shims + shell: bash + run: | + shopt -s nullglob + dirs=(cli/npm/*/) + if [ "${#dirs[@]}" -eq 0 ]; then + echo "No CLI platform package directories found" + exit 0 + fi + pnpm exec tsx scripts/write-platform-package-shims.ts "${dirs[@]}" - name: Validate CLI platform packages shell: bash run: | @@ -314,12 +324,12 @@ jobs: if [ ! -f "${target_dir}package.json" ]; then continue fi - if [ ! -f "${target_dir}noop.cjs" ]; then - echo "ERROR: missing ${target_dir}noop.cjs" + if [ ! -f "${target_dir}noop.mjs" ]; then + echo "ERROR: missing ${target_dir}noop.mjs" exit 1 fi - if [ ! -f "${target_dir}noop.d.ts" ]; then - echo "ERROR: missing ${target_dir}noop.d.ts" + if [ ! -f "${target_dir}noop.d.mts" ]; then + echo "ERROR: missing ${target_dir}noop.d.mts" exit 1 fi actual_count=$(find "${target_dir}" -maxdepth 1 -type f -name '*.node' | wc -l | tr -d ' ') From a9fa7b4bfe43decdf01c1680f9c47500a2d7f91f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Tue, 31 Mar 2026 16:00:50 +0800 Subject: [PATCH 19/27] refactor: modularize platform package shim generation and improve file handling --- scripts/copy-napi.ts | 57 ++--------------- scripts/write-platform-package-shims.ts | 81 +++++++++++++++++++++++++ 2 files changed, 85 insertions(+), 53 deletions(-) create mode 100644 scripts/write-platform-package-shims.ts diff --git a/scripts/copy-napi.ts b/scripts/copy-napi.ts index b0efaddc..279786ba 100644 --- a/scripts/copy-napi.ts +++ b/scripts/copy-napi.ts @@ -1,9 +1,11 @@ #!/usr/bin/env tsx -import {cpSync, existsSync, mkdirSync, readdirSync, writeFileSync} from 'node:fs' +import {cpSync, existsSync, mkdirSync, readdirSync} from 'node:fs' import {dirname, join, resolve} from 'node:path' import {fileURLToPath} from 'node:url' import process from 'node:process' +import {resolveTargetDirs, writePlatformPackageShims} from './write-platform-package-shims' + const NATIVE_MODULES = [ {name: 'logger', distDir: 'libraries/logger/dist'}, {name: 'md-compiler', distDir: 'libraries/md-compiler/dist'}, @@ -23,59 +25,8 @@ const __dirname = dirname(fileURLToPath(import.meta.url)) const root = resolve(__dirname, '..') const suffix = PLATFORM_MAP[`${process.platform}-${process.arch}`] -const PLATFORM_PACKAGE_SHIM = `'use strict' - -const {readdirSync} = require('node:fs') -const {join} = require('node:path') - -const EXPORT_BINDINGS = [ - ['logger', 'napi-logger.'], - ['mdCompiler', 'napi-md-compiler.'], - ['scriptRuntime', 'napi-script-runtime.'], - ['config', 'napi-memory-sync-cli.'] -] - -const nodeFiles = readdirSync(__dirname).filter(file => file.endsWith('.node')) -const bindings = {} - -for (const [exportName, prefix] of EXPORT_BINDINGS) { - const file = nodeFiles.find(candidate => candidate.startsWith(prefix)) - if (file == null) continue - - Object.defineProperty(bindings, exportName, { - enumerable: true, - get() { - return require(join(__dirname, file)) - } - }) -} - -module.exports = bindings -` - -const PLATFORM_PACKAGE_TYPES = `declare const bindings: { - readonly logger?: unknown - readonly mdCompiler?: unknown - readonly scriptRuntime?: unknown - readonly config?: unknown -} - -export = bindings -` - -function writePlatformPackageShim(targetDir: string): void { - writeFileSync(join(targetDir, 'noop.cjs'), PLATFORM_PACKAGE_SHIM, 'utf8') - writeFileSync(join(targetDir, 'noop.d.ts'), PLATFORM_PACKAGE_TYPES, 'utf8') -} - const npmPackagesDir = join(root, 'cli', 'npm') -const platformPackageDirs = readdirSync(npmPackagesDir, {withFileTypes: true}) - .filter(entry => entry.isDirectory()) - .map(entry => join(npmPackagesDir, entry.name)) - -for (const targetDir of platformPackageDirs) { - writePlatformPackageShim(targetDir) -} +writePlatformPackageShims(resolveTargetDirs([])) if (suffix == null) { console.warn(`[copy-napi] Unsupported platform: ${process.platform}-${process.arch}, wrote package shims only`) diff --git a/scripts/write-platform-package-shims.ts b/scripts/write-platform-package-shims.ts new file mode 100644 index 00000000..31d3bd1e --- /dev/null +++ b/scripts/write-platform-package-shims.ts @@ -0,0 +1,81 @@ +#!/usr/bin/env tsx + +import {mkdirSync, readdirSync, writeFileSync} from 'node:fs' +import {dirname, join, resolve} from 'node:path' +import {fileURLToPath, pathToFileURL} from 'node:url' + +const __dirname = dirname(fileURLToPath(import.meta.url)) +const root = resolve(__dirname, '..') + +export const PLATFORM_PACKAGE_SHIM = `import {readdirSync} from 'node:fs' +import {createRequire} from 'node:module' +import {dirname, join} from 'node:path' +import {fileURLToPath} from 'node:url' + +const require = createRequire(import.meta.url) +const moduleDir = dirname(fileURLToPath(import.meta.url)) +const nodeFiles = readdirSync(moduleDir).filter(file => file.endsWith('.node')) + +function loadBinding(prefix) { + const file = nodeFiles.find(candidate => candidate.startsWith(prefix)) + return file == null ? undefined : require(join(moduleDir, file)) +} + +export const logger = loadBinding('napi-logger.') +export const mdCompiler = loadBinding('napi-md-compiler.') +export const scriptRuntime = loadBinding('napi-script-runtime.') +export const config = loadBinding('napi-memory-sync-cli.') + +const bindings = {logger, mdCompiler, scriptRuntime, config} + +export default bindings +` + +export const PLATFORM_PACKAGE_TYPES = `export declare const logger: unknown | undefined +export declare const mdCompiler: unknown | undefined +export declare const scriptRuntime: unknown | undefined +export declare const config: unknown | undefined + +declare const bindings: { + readonly logger: typeof logger + readonly mdCompiler: typeof mdCompiler + readonly scriptRuntime: typeof scriptRuntime + readonly config: typeof config +} + +export default bindings +` + +export function resolveTargetDirs(args: readonly string[]): string[] { + if (args.length > 0) { + return args.map(targetDir => resolve(process.cwd(), targetDir)) + } + + const npmPackagesDir = join(root, 'cli', 'npm') + return readdirSync(npmPackagesDir, {withFileTypes: true}) + .filter(entry => entry.isDirectory()) + .map(entry => join(npmPackagesDir, entry.name)) +} + +export function writePlatformPackageShim(targetDir: string): void { + mkdirSync(targetDir, {recursive: true}) + writeFileSync(join(targetDir, 'noop.mjs'), PLATFORM_PACKAGE_SHIM, 'utf8') + writeFileSync(join(targetDir, 'noop.d.mts'), PLATFORM_PACKAGE_TYPES, 'utf8') +} + +export function writePlatformPackageShims(targetDirs: readonly string[]): void { + for (const targetDir of targetDirs) { + writePlatformPackageShim(targetDir) + } +} + +function isEntrypoint(): boolean { + const entryArg = process.argv[1] + if (entryArg == null) return false + + return import.meta.url === pathToFileURL(entryArg).href +} + +if (isEntrypoint()) { + writePlatformPackageShims(resolveTargetDirs(process.argv.slice(2))) +} From 0b528e21de7632c82d758327a23df0787d7e0e94 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Tue, 31 Mar 2026 16:01:13 +0800 Subject: [PATCH 20/27] refactor: update ESLint ignore patterns and reorder native binding candidates --- sdk/eslint.config.ts | 4 ++-- sdk/src/core/native-binding.ts | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sdk/eslint.config.ts b/sdk/eslint.config.ts index 9c891393..e6caf052 100644 --- a/sdk/eslint.config.ts +++ b/sdk/eslint.config.ts @@ -17,8 +17,8 @@ const config = await eslint10({ ignores: [ '.turbo/**', 'aindex/**', - 'npm/**/noop.cjs', - 'npm/**/noop.d.ts', + 'npm/**/noop.mjs', + 'npm/**/noop.d.mts', '*.md', '**/*.md', '*.toml', diff --git a/sdk/src/core/native-binding.ts b/sdk/src/core/native-binding.ts index d761cef8..deaf2c50 100644 --- a/sdk/src/core/native-binding.ts +++ b/sdk/src/core/native-binding.ts @@ -29,13 +29,13 @@ export function tryLoadNativeBinding(): T | undefined { const packageName = `@truenine/memory-sync-cli-${suffix}` const binaryFile = `napi-memory-sync-cli.${suffix}.node` const candidates = [ - packageName, `${packageName}/${binaryFile}`, `./${binaryFile}`, - `../npm/${suffix}`, `../npm/${suffix}/${binaryFile}`, - `../../npm/${suffix}`, - `../../npm/${suffix}/${binaryFile}` + `../../npm/${suffix}/${binaryFile}`, + packageName, + `../npm/${suffix}`, + `../../npm/${suffix}` ] for (const specifier of candidates) { From 11fb34ca30c2771bbcdb6efffa5587fd2645e154 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Tue, 31 Mar 2026 21:34:11 +0800 Subject: [PATCH 21/27] Fix docs MDX hydration and code block styling --- doc/app/docs/{ => [[...mdxPath]]}/layout.tsx | 36 +- doc/app/docs/[[...mdxPath]]/page.tsx | 2 +- doc/app/docs/[section]/[[...rest]]/page.tsx | 74 - doc/app/docs/[section]/layout.tsx | 72 - doc/app/globals.css | 1509 +++++------------- doc/app/home-page.mdx | 52 +- doc/app/layout.tsx | 62 +- doc/components/docs-code-block.tsx | 111 ++ doc/components/docs-section-nav.tsx | 11 +- doc/lib/docs-sections.ts | 1 + doc/mdx-components.tsx | 83 +- 11 files changed, 672 insertions(+), 1341 deletions(-) rename doc/app/docs/{ => [[...mdxPath]]}/layout.tsx (70%) delete mode 100644 doc/app/docs/[section]/[[...rest]]/page.tsx delete mode 100644 doc/app/docs/[section]/layout.tsx create mode 100644 doc/components/docs-code-block.tsx diff --git a/doc/app/docs/layout.tsx b/doc/app/docs/[[...mdxPath]]/layout.tsx similarity index 70% rename from doc/app/docs/layout.tsx rename to doc/app/docs/[[...mdxPath]]/layout.tsx index 786d3d7a..ce8c1e2b 100644 --- a/doc/app/docs/layout.tsx +++ b/doc/app/docs/[[...mdxPath]]/layout.tsx @@ -1,18 +1,23 @@ import type {ReactNode} from 'react' -import Link from 'next/link' import {Footer, Layout, Navbar} from 'nextra-theme-docs' import {getPageMap} from 'nextra/page-map' -import {siteConfig} from '../../lib/site' +import {DocsSectionNav} from '../../../components/docs-section-nav' +import {isDocSectionName} from '../../../lib/docs-sections' +import {siteConfig} from '../../../lib/site' -export default async function DocsLayout({children}: {readonly children: ReactNode}) { - const pageMap = await getPageMap('/docs') - const sectionLinks = [ - {href: '/docs/cli', label: 'CLI'}, - {href: '/docs/mcp', label: 'MCP'}, - {href: '/docs/gui', label: 'GUI'}, - {href: '/docs/technical-details', label: '技术细节'}, - {href: '/docs/design-rationale', label: '设计初衷'} - ] as const +export default async function DocsLayout({ + children, + params: paramsPromise +}: { + readonly children: ReactNode + readonly params: Promise<{readonly mdxPath?: string[]}> +}) { + const params = await paramsPromise + const firstSegment = params.mdxPath?.[0] + const section = firstSegment != null && isDocSectionName(firstSegment) + ? firstSegment + : undefined + const pageMap = await getPageMap(section ? `/docs/${section}` : '/docs') return (
- - +
GitHub diff --git a/doc/app/docs/[[...mdxPath]]/page.tsx b/doc/app/docs/[[...mdxPath]]/page.tsx index 782b76d2..4c4786a6 100644 --- a/doc/app/docs/[[...mdxPath]]/page.tsx +++ b/doc/app/docs/[[...mdxPath]]/page.tsx @@ -36,7 +36,7 @@ export default async function DocsPage(props: { sourceCode } = await importPage(params.mdxPath) - const page = + const page = if (!Wrapper) { return page diff --git a/doc/app/docs/[section]/[[...rest]]/page.tsx b/doc/app/docs/[section]/[[...rest]]/page.tsx deleted file mode 100644 index 8e7b3748..00000000 --- a/doc/app/docs/[section]/[[...rest]]/page.tsx +++ /dev/null @@ -1,74 +0,0 @@ -import type {ComponentType, ReactNode} from 'react' -import {notFound} from 'next/navigation' -import {generateStaticParamsFor, importPage} from 'nextra/pages' -import {isDocSectionName} from '../../../../lib/docs-sections' -import {useMDXComponents as getMDXComponents} from '../../../../mdx-components' - -const getAllDocParams = generateStaticParamsFor('mdxPath') - -function isSectionDocParam( - value: {mdxPath?: string[]} -): value is {mdxPath: [string, ...string[]]} { - return value.mdxPath != null - && value.mdxPath.length > 0 - && isDocSectionName(value.mdxPath[0]) -} - -export async function generateStaticParams() { - const allParams = await getAllDocParams() - return (allParams as {mdxPath?: string[]}[]) - .filter(isSectionDocParam) - .map(p => ({ - section: p.mdxPath[0], - rest: p.mdxPath.length > 1 ? p.mdxPath.slice(1) : void 0 - })) -} - -export async function generateMetadata(props: { - readonly params: Promise<{readonly section: string, readonly rest?: string[]}> -}) { - const params = await props.params - if (!isDocSectionName(params.section)) notFound() - const mdxPath = [params.section, ...params.rest ?? []] - const {metadata} = await importPage(mdxPath) - return metadata -} - -interface WrapperProps { - readonly children: ReactNode - readonly metadata: unknown - readonly sourceCode: string - readonly toc: unknown -} - -const components = getMDXComponents() as { - readonly wrapper?: ComponentType -} - -const Wrapper = components.wrapper - -export default async function SectionPage(props: { - readonly params: Promise<{readonly section: string, readonly rest?: string[]}> -}) { - const params = await props.params - if (!isDocSectionName(params.section)) notFound() - const mdxPath = [params.section, ...params.rest ?? []] - const { - default: MDXContent, - toc, - metadata, - sourceCode - } = await importPage(mdxPath) - - const page = - - if (!Wrapper) { - return page - } - - return ( - - {page} - - ) -} diff --git a/doc/app/docs/[section]/layout.tsx b/doc/app/docs/[section]/layout.tsx deleted file mode 100644 index c2c6ab49..00000000 --- a/doc/app/docs/[section]/layout.tsx +++ /dev/null @@ -1,72 +0,0 @@ -import type {ReactNode} from 'react' -import {notFound} from 'next/navigation' -import {Footer, Layout, Navbar} from 'nextra-theme-docs' -import {getPageMap} from 'nextra/page-map' -import {DocsSectionNav} from '../../../components/docs-section-nav' -import {isDocSectionName} from '../../../lib/docs-sections' -import {siteConfig} from '../../../lib/site' - -export default async function SectionLayout({ - children, - params -}: { - readonly children: ReactNode - readonly params: Promise<{readonly section: string}> -}) { - const {section} = await params - if (!isDocSectionName(section)) notFound() - const pageMap = await getPageMap(`/docs/${section}`) - - return ( - - memory-sync -
- )} - > - - - )} - footer={ -
AGPL-3.0-only · 面向当前仓库实现、命令表面与配置边界
- } - docsRepositoryBase={`${siteConfig.docsRepositoryBase}/content`} - editLink="在 GitHub 上编辑此页" - feedback={{ - content: '有遗漏或过时信息?提交 issue', - link: siteConfig.issueUrl, - labels: 'documentation' - }} - sidebar={{ - autoCollapse: false, - defaultMenuCollapseLevel: 99, - defaultOpen: true, - toggleButton: false - }} - toc={{ - float: true, - title: '本页目录', - backToTop: '回到顶部' - }} - themeSwitch={{ - dark: '暗色', - light: '亮色', - system: '系统' - }} - nextThemes={{ - attribute: 'class', - defaultTheme: 'dark', - disableTransitionOnChange: true, - storageKey: 'memory-sync-docs-theme' - }} - > - {children} - - ) -} diff --git a/doc/app/globals.css b/doc/app/globals.css index 9a3776fb..be571387 100644 --- a/doc/app/globals.css +++ b/doc/app/globals.css @@ -2,63 +2,61 @@ html.dark { color-scheme: dark; --nextra-content-width: 1380px; - --page-bg: #0b0c10; - --page-fg: #fafafa; - --page-fg-soft: #b8bec7; - --page-fg-muted: #8b919a; - --surface: rgba(18, 20, 24, 0.82); - --surface-strong: #101116; - --surface-muted: #14161b; - --surface-subtle: #171a20; - --surface-elevated: #1a1d24; - --surface-overlay: rgba(20, 22, 28, 0.92); - --surface-border: rgba(255, 255, 255, 0.04); - --surface-border-strong: rgba(255, 255, 255, 0.07); - --surface-separator: rgba(255, 255, 255, 0.055); - --surface-highlight: rgba(255, 255, 255, 0.025); - --surface-highlight-strong: rgba(255, 255, 255, 0.045); - --inline-code-bg: rgba(255, 255, 255, 0.032); - --shadow-sm: 0 10px 28px rgba(0, 0, 0, 0.16); - --shadow-md: 0 26px 76px rgba(0, 0, 0, 0.24); - --hero-glow: radial-gradient(circle at top, rgba(255, 255, 255, 0.045), transparent 58%); - --page-gradient: - radial-gradient(circle at top, rgba(255, 255, 255, 0.024), transparent 34%), - linear-gradient(180deg, #0a0b0f 0%, #0d1015 100%); - --button-primary-bg: #fafafa; - --button-primary-fg: #111111; - --button-secondary-bg: rgba(255, 255, 255, 0.028); - --button-secondary-fg: #fafafa; + --page-bg: #000000; + --page-fg: #ffffff; + --page-fg-soft: #a1a1a1; + --page-fg-muted: #666666; + --surface: rgba(0, 0, 0, 0.8); + --surface-strong: #000000; + --surface-muted: #111111; + --surface-subtle: #171717; + --surface-elevated: #1f1f1f; + --surface-overlay: rgba(0, 0, 0, 0.9); + --surface-border: rgba(255, 255, 255, 0.1); + --surface-border-strong: rgba(255, 255, 255, 0.15); + --surface-separator: rgba(255, 255, 255, 0.08); + --surface-highlight: rgba(255, 255, 255, 0.03); + --surface-highlight-strong: rgba(255, 255, 255, 0.06); + --inline-code-bg: rgba(255, 255, 255, 0.05); + --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.1); + --shadow-md: 0 12px 40px rgba(0, 0, 0, 0.2); + --hero-glow: radial-gradient(circle at top, rgba(255, 255, 255, 0.05), transparent 60%); + --page-gradient: linear-gradient(180deg, #000000 0%, #000000 100%); + --button-primary-bg: #ffffff; + --button-primary-fg: #000000; + --button-secondary-bg: rgba(255, 255, 255, 0.05); + --button-secondary-fg: #ffffff; + --accent: #ffffff; } html.light { color-scheme: light; --nextra-content-width: 1380px; --page-bg: #ffffff; - --page-fg: #111111; - --page-fg-soft: #5f6670; - --page-fg-muted: #7b818b; + --page-fg: #000000; + --page-fg-soft: #666666; + --page-fg-muted: #888888; --surface: rgba(255, 255, 255, 0.9); --surface-strong: #ffffff; - --surface-muted: #f6f7f8; - --surface-subtle: #f3f4f6; - --surface-elevated: #f8f9fb; - --surface-overlay: rgba(255, 255, 255, 0.94); - --surface-border: rgba(17, 17, 17, 0.08); - --surface-border-strong: rgba(17, 17, 17, 0.14); - --surface-separator: rgba(17, 17, 17, 0.06); - --surface-highlight: rgba(17, 17, 17, 0.035); - --surface-highlight-strong: rgba(17, 17, 17, 0.055); - --inline-code-bg: rgba(17, 17, 17, 0.035); - --shadow-sm: 0 1px 2px rgba(15, 23, 42, 0.05); - --shadow-md: 0 12px 40px rgba(15, 23, 42, 0.06); - --hero-glow: radial-gradient(circle at top, rgba(0, 0, 0, 0.06), transparent 58%); - --page-gradient: - radial-gradient(circle at top, rgba(0, 0, 0, 0.04), transparent 30%), - linear-gradient(180deg, #ffffff 0%, #fbfbfc 100%); - --button-primary-bg: #111111; + --surface-muted: #fafafa; + --surface-subtle: #f5f5f5; + --surface-elevated: #f0f0f0; + --surface-overlay: rgba(255, 255, 255, 0.95); + --surface-border: rgba(0, 0, 0, 0.08); + --surface-border-strong: rgba(0, 0, 0, 0.12); + --surface-separator: rgba(0, 0, 0, 0.06); + --surface-highlight: rgba(0, 0, 0, 0.03); + --surface-highlight-strong: rgba(0, 0, 0, 0.05); + --inline-code-bg: rgba(0, 0, 0, 0.05); + --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.05); + --shadow-md: 0 12px 40px rgba(0, 0, 0, 0.06); + --hero-glow: radial-gradient(circle at top, rgba(0, 0, 0, 0.04), transparent 60%); + --page-gradient: linear-gradient(180deg, #ffffff 0%, #ffffff 100%); + --button-primary-bg: #000000; --button-primary-fg: #ffffff; - --button-secondary-bg: rgba(255, 255, 255, 0.82); - --button-secondary-fg: #111111; + --button-secondary-bg: rgba(0, 0, 0, 0.05); + --button-secondary-fg: #000000; + --accent: #000000; } *, @@ -75,10 +73,14 @@ body { color: var(--page-fg); font-family: var(--font-sans), - 'PingFang SC', - 'Hiragino Sans GB', - 'Microsoft YaHei', - 'Noto Sans SC', + ui-sans-serif, + system-ui, + -apple-system, + BlinkMacSystemFont, + 'Segoe UI', + Roboto, + 'Helvetica Neue', + Arial, sans-serif; } @@ -120,7 +122,7 @@ samp { } .docs-home { - padding: 20px 0 88px; + padding: 40px 0 88px; } .home-topbar { @@ -128,31 +130,37 @@ samp { align-items: center; justify-content: space-between; gap: 20px; - padding: 6px 0 18px; + padding: 0 0 24px; + border-bottom: 1px solid var(--surface-border); + margin-bottom: 48px; } .home-brand { - display: inline-flex; - align-items: baseline; - gap: 10px; -} - -.docs-brand { display: inline-flex; align-items: center; - gap: 10px; + gap: 12px; } .home-brand strong { - font-size: 0.95rem; - font-weight: 600; + font-size: 1.1rem; + font-weight: 700; letter-spacing: -0.02em; } .home-brand span { color: var(--page-fg-muted); - font-size: 0.78rem; - letter-spacing: 0.02em; + font-size: 0.8rem; + font-weight: 500; + padding: 2px 8px; + border: 1px solid var(--surface-border); + border-radius: 999px; + background: var(--surface-subtle); +} + +.docs-brand { + display: inline-flex; + align-items: center; + gap: 10px; } .docs-brand-badge { @@ -162,1294 +170,589 @@ samp { min-height: 24px; padding: 0 8px; border: 1px solid var(--surface-border); - border-radius: 999px; - background: color-mix(in srgb, var(--surface-strong) 82%, transparent); + border-radius: 6px; + background: var(--surface-subtle); color: var(--page-fg-muted); font-size: 0.68rem; - font-weight: 700; - letter-spacing: 0.08em; - text-transform: uppercase; + font-weight: 600; + letter-spacing: 0.02em; } .docs-brand-title { - font-size: 0.95rem; + font-size: 1rem; font-weight: 600; - letter-spacing: -0.02em; + letter-spacing: -0.01em; } .home-topbar-nav { display: inline-flex; align-items: center; - gap: 8px; + gap: 12px; } .home-topbar-nav a, .docs-nav-link { display: inline-flex; align-items: center; - min-height: 34px; - padding: 0 11px; - border: 1px solid transparent; - border-radius: 999px; - color: var(--page-fg-muted); - font-size: 0.86rem; - transition: - border-color 0.2s ease, - background-color 0.2s ease, - color 0.2s ease, - transform 0.2s ease; -} - -.docs-site-navbar nav:not(.docs-navbar-links) { - gap: 14px; -} - -.docs-site-navbar nav:not(.docs-navbar-links) > :nth-child(2) { - display: none; -} - -.docs-site-navbar nav:not(.docs-navbar-links) > :nth-child(3) { - order: 3; + min-height: 36px; + padding: 0 12px; + border-radius: 6px; + color: var(--page-fg-soft); + font-size: 0.9rem; + font-weight: 500; + transition: all 0.2s ease; } -.docs-site-navbar nav:not(.docs-navbar-links) > :nth-child(4) { - order: 2; +.home-topbar-nav a:hover, +.docs-nav-link:hover { + background: var(--surface-highlight); + color: var(--page-fg); } -.docs-site-navbar nav:not(.docs-navbar-links) > :nth-child(5) { - order: 4; +.docs-nav-link.is-active { + color: var(--page-fg); + background: var(--surface-highlight-strong); } .docs-navbar-shell { - display: inline-flex; - flex: 1 1 auto; + display: flex; + flex: 1; align-items: center; justify-content: flex-end; - gap: 12px; - min-width: 0; - overflow: hidden; + gap: 16px; } .docs-navbar-links { - display: inline-flex; + display: flex; align-items: center; - flex-wrap: nowrap; - justify-content: flex-end; - gap: 8px; - min-width: 0; - overflow-x: auto; - padding-bottom: 2px; + gap: 4px; } .docs-navbar-actions { - display: inline-flex; + display: flex; align-items: center; - gap: 10px; + gap: 8px; + margin-left: 8px; + padding-left: 16px; + border-left: 1px solid var(--surface-separator); } .docs-navbar-action { display: inline-flex; align-items: center; justify-content: center; - min-height: 36px; + min-height: 34px; padding: 0 12px; border: 1px solid var(--surface-border); - border-radius: 999px; - background: color-mix(in srgb, var(--surface-strong) 84%, transparent); + border-radius: 6px; + background: var(--surface-strong); color: var(--page-fg-soft); font-size: 0.84rem; font-weight: 500; - transition: - border-color 0.2s ease, - background-color 0.2s ease, - color 0.2s ease, - transform 0.2s ease; + transition: all 0.2s ease; } -.home-topbar-nav a:hover, -.docs-nav-link:hover, .docs-navbar-action:hover { - border-color: var(--surface-border); - background: var(--surface); + border-color: var(--surface-border-strong); color: var(--page-fg); - transform: translateY(-1px); } .home-hero { - padding: 18px 0 8px; + padding: 0 0 64px; } -.home-hero-copy, -.home-link-card, -.capability-card, -.reading-path-card, -.not-found-shell { - position: relative; - overflow: hidden; - border: 1px solid var(--surface-border); - border-radius: 24px; - background: var(--surface); - box-shadow: var(--shadow-sm); +.home-hero-copy { + padding: 0; + background: transparent; + box-shadow: none; } -.home-hero-copy { - padding: 34px 34px 30px; - background-image: - linear-gradient(180deg, color-mix(in srgb, var(--surface-strong) 92%, transparent), var(--surface)), - var(--hero-glow); - box-shadow: var(--shadow-md); -} - -.home-hero-copy::before, -.home-link-card::before, -.capability-card::before, -.reading-path-card::before { - content: ''; - position: absolute; - inset: 0 auto auto 0; - width: 100%; - height: 1px; - background: linear-gradient(90deg, transparent, var(--surface-border-strong), transparent); +.home-hero-copy::before { + display: none; } .section-kicker { - margin: 0 0 10px; + margin: 0 0 16px; color: var(--page-fg-muted); - font-size: 0.72rem; + font-size: 0.85rem; font-weight: 600; - letter-spacing: 0.08em; + letter-spacing: 0.02em; text-transform: uppercase; } -.home-hero h1, -.section-heading h2, -.not-found-shell h1 { - margin: 0; - letter-spacing: -0.045em; -} - .home-hero h1 { - max-width: 13ch; - font-size: clamp(1.95rem, 3.2vw, 2.9rem); - line-height: 0.98; + margin: 0; + font-size: clamp(2.5rem, 5vw, 4rem); + font-weight: 800; + letter-spacing: -0.04em; + line-height: 1.1; + color: var(--page-fg); } .home-hero-lead { - max-width: 52rem; - margin: 16px 0 0; + max-width: 48rem; + margin: 24px 0 0; color: var(--page-fg-soft); - font-size: 0.97rem; - line-height: 1.72; + font-size: 1.15rem; + line-height: 1.6; } -.home-actions, -.not-found-actions { +.home-actions { display: flex; flex-wrap: wrap; gap: 12px; - margin-top: 22px; + margin-top: 32px; } .hero-button { display: inline-flex; align-items: center; justify-content: center; - min-height: 40px; - padding: 0 15px; - border: 1px solid var(--surface-border-strong); - border-radius: 999px; - font-size: 0.88rem; - font-weight: 500; - transition: - border-color 0.2s ease, - background-color 0.2s ease, - color 0.2s ease, - transform 0.2s ease, - box-shadow 0.2s ease; -} - -.hero-button:hover { - transform: translateY(-1px); + min-height: 44px; + padding: 0 24px; + border-radius: 8px; + font-size: 0.95rem; + font-weight: 600; + transition: all 0.2s ease; } .hero-button-primary { - border-color: var(--button-primary-bg); background: var(--button-primary-bg); color: var(--button-primary-fg); + border: 1px solid var(--button-primary-bg); } .hero-button-primary:hover { - box-shadow: 0 10px 20px rgba(0, 0, 0, 0.08); - opacity: 0.96; + opacity: 0.9; + transform: translateY(-1px); } .hero-button-secondary { background: var(--button-secondary-bg); color: var(--button-secondary-fg); + border: 1px solid var(--surface-border); } .hero-button-secondary:hover { - background: var(--surface-subtle); -} - -.home-proof-strip, -.home-link-grid, -.capability-grid, -.reading-path-grid { - display: grid; - gap: 14px; + background: var(--surface-highlight); + border-color: var(--surface-border-strong); + transform: translateY(-1px); } .home-proof-strip { - grid-template-columns: repeat(3, minmax(0, 1fr)); - margin: 24px 0 0; + display: grid; + grid-template-columns: repeat(3, 1fr); + gap: 24px; + margin: 64px 0 0; padding: 0; list-style: none; } .proof-pill { - padding: 14px 16px; + padding: 24px; border: 1px solid var(--surface-border); - border-radius: 18px; - background: color-mix(in srgb, var(--surface-strong) 74%, transparent); + border-radius: 12px; + background: var(--surface-subtle); } -.proof-pill span, -.home-link-card span, -.capability-card span, -.reading-path-card small { +.proof-pill span { color: var(--page-fg-muted); - font-size: 0.74rem; + font-size: 0.75rem; font-weight: 600; - letter-spacing: 0.07em; + letter-spacing: 0.05em; text-transform: uppercase; } -.proof-pill strong, -.home-link-card strong, -.capability-card h3, -.reading-path-card strong { +.proof-pill strong { display: block; - margin: 8px 0 0; - font-size: 0.98rem; - font-weight: 600; - letter-spacing: -0.02em; + margin: 12px 0 0; + font-size: 1.25rem; + font-weight: 700; + color: var(--page-fg); } .home-section { - padding-top: 34px; + padding-top: 80px; } .section-heading { - display: flex; - flex-direction: column; - gap: 8px; - margin-bottom: 18px; + margin-bottom: 40px; } .section-heading h2 { - max-width: 22ch; - font-size: clamp(1.45rem, 2.2vw, 1.9rem); - line-height: 1.06; + font-size: 2.25rem; + font-weight: 700; + letter-spacing: -0.02em; + margin: 0; } .section-summary { max-width: 44rem; - margin: 0; + margin: 16px 0 0; color: var(--page-fg-soft); - font-size: 0.93rem; - line-height: 1.66; -} - -.home-link-grid { - grid-template-columns: repeat(2, minmax(0, 1fr)); + font-size: 1.1rem; + line-height: 1.6; } +.home-link-grid, .capability-grid { - grid-template-columns: repeat(2, minmax(0, 1fr)); + display: grid; + grid-template-columns: repeat(2, 1fr); + gap: 16px; } .reading-path-grid { - grid-template-columns: repeat(4, minmax(0, 1fr)); + display: grid; + grid-template-columns: repeat(4, 1fr); + gap: 16px; } .home-link-card, .capability-card, .reading-path-card { - padding: 20px; - transition: - transform 0.24s ease, - border-color 0.24s ease, - background-color 0.24s ease, - box-shadow 0.24s ease; + padding: 32px; + border: 1px solid var(--surface-border); + border-radius: 12px; + background: var(--surface-strong); + transition: all 0.2s ease; } .home-link-card:hover, .capability-card:hover, .reading-path-card:hover { border-color: var(--surface-border-strong); - background: color-mix(in srgb, var(--surface-strong) 94%, var(--surface-subtle)); - box-shadow: var(--shadow-md); + background: var(--surface-subtle); transform: translateY(-2px); } +.home-link-card strong, +.capability-card h3, +.reading-path-card strong { + display: block; + font-size: 1.2rem; + font-weight: 600; + margin-bottom: 12px; +} + .home-link-card p, .capability-card p, -.reading-path-card p, -.not-found-shell p { - margin: 10px 0 0; +.reading-path-card p { color: var(--page-fg-soft); - font-size: 0.92rem; - line-height: 1.68; + font-size: 0.95rem; + line-height: 1.6; + margin: 0; +} + +.reading-path-card small { + display: inline-block; + margin-bottom: 12px; + color: var(--page-fg-muted); + font-size: 0.75rem; + font-weight: 700; } .home-link-card span { - display: inline-flex; - margin-top: 16px; + display: inline-block; + margin-top: 24px; + font-size: 0.85rem; + font-weight: 600; + color: var(--page-fg-muted); } .capability-card span { - display: inline-flex; + display: inline-block; + margin-bottom: 12px; + color: var(--page-fg-muted); + font-size: 0.75rem; + font-weight: 700; + text-transform: uppercase; } -.reading-path-card small { - display: inline-flex; +.nextra-nav-container-blur, +.nextra-navbar-blur { + backdrop-filter: blur(12px); + background: rgba(0, 0, 0, 0.7) !important; + border-bottom: 1px solid var(--surface-separator); } -.not-found-shell { - margin-top: 8vh; - padding: 30px; +.nextra-sidebar { + background: transparent !important; + border-right: 1px solid var(--surface-separator); } -.not-found-code { - margin: 0 0 12px; - color: var(--page-fg-muted); - font-size: 0.76rem; - font-weight: 600; - letter-spacing: 0.08em; - text-transform: uppercase; +.nextra-sidebar a { + border-radius: 6px; + transition: all 0.1s ease; + font-size: 0.9rem; + padding: 6px 12px; + color: var(--page-fg-soft); } -.nextra-content, -.nextra-sidebar, -.nextra-toc, -.nextra-footer, -.nextra-navbar { - font-family: - var(--font-sans), - 'PingFang SC', - 'Hiragino Sans GB', - 'Microsoft YaHei', - 'Noto Sans SC', - sans-serif; +.nextra-sidebar a:hover { + background: var(--surface-highlight); + color: var(--page-fg); } -.nextra-nav-container-blur, -.nextra-sidebar-footer, -.nextra-toc, -.nextra-navbar-blur { - backdrop-filter: blur(18px); +.nextra-sidebar :is(a[aria-current='page'], li.active > a) { + background: var(--surface-highlight-strong); + color: var(--page-fg); + font-weight: 600; } -.nextra-nav-container-blur, -.nextra-navbar-blur { - background: color-mix(in srgb, var(--page-bg) 84%, transparent); - border-color: var(--surface-border); +.nextra-toc { + background: transparent !important; + border-left: 1px solid var(--surface-separator); } -.nextra-sidebar, -.nextra-toc, -.nextra-footer { - border-color: var(--surface-border); +.nextra-toc a { + font-size: 0.85rem; + color: var(--page-fg-soft); } -.nextra-sidebar, -.nextra-toc { - background: color-mix(in srgb, var(--surface-strong) 90%, transparent); +.nextra-toc a[data-active='true'] { + color: var(--page-fg); + font-weight: 600; } -.nextra-toc { - display: block; +.nextra-body-typesetting-article { + max-width: 820px; + padding-top: 40px; + padding-bottom: 80px; } -.nextra-sidebar { - border-right: 1px solid var(--surface-border); +.nextra-body-typesetting-article h1 { + font-size: 3rem; + font-weight: 800; + letter-spacing: -0.04em; + margin-bottom: 32px; } -.nextra-sidebar::-webkit-scrollbar { - width: 8px; +.nextra-body-typesetting-article h2 { + font-size: 1.875rem; + font-weight: 700; + letter-spacing: -0.02em; + margin-top: 48px; + margin-bottom: 16px; + padding-bottom: 8px; + border-bottom: 1px solid var(--surface-separator); } -.nextra-sidebar::-webkit-scrollbar-thumb { - border-radius: 999px; - background: color-mix(in srgb, var(--page-fg-muted) 45%, transparent); +.nextra-body-typesetting-article h3 { + font-size: 1.5rem; + font-weight: 600; + letter-spacing: -0.01em; + margin-top: 32px; + margin-bottom: 12px; } -.nextra-sidebar :is(ul, ol) { - gap: 2px; +.nextra-body-typesetting-article p { + font-size: 1.05rem; + line-height: 1.7; + color: var(--page-fg-soft); + margin-bottom: 24px; } -.nextra-sidebar > div > ul { - gap: 10px; +.nextra-body-typesetting-article :not(pre) > code { + padding: 0.2em 0.4em; + font-size: 0.9em; + background: var(--inline-code-bg); + border-radius: 6px; + border: 1px solid var(--surface-separator); } -.nextra-sidebar > div > ul > li:has(> div) > :is(a, button) { - min-height: auto; - margin-top: 14px; - padding: 0 0 8px; - border: none; - border-radius: 0; - background: transparent; - color: var(--page-fg-muted); - font-size: 0.74rem; - font-weight: 700; - letter-spacing: 0.08em; - text-transform: uppercase; - pointer-events: none; +.docs-code-block { + margin: 28px 0; + border: 1px solid rgba(255, 255, 255, 0.12); + border-radius: 16px; + background: + linear-gradient(180deg, rgba(17, 24, 39, 0.9), rgba(4, 7, 12, 0.98) 34%), + #05070b; + box-shadow: 0 18px 48px rgba(0, 0, 0, 0.28); + overflow: hidden; + position: relative; } -.nextra-sidebar > div > ul > li:first-child:has(> div) > :is(a, button) { - margin-top: 0; +.docs-code-block-header { + display: flex; + align-items: flex-start; + justify-content: space-between; + gap: 16px; + padding: 14px 16px 12px; + border-bottom: 1px solid rgba(255, 255, 255, 0.08); + background: + linear-gradient(180deg, rgba(255, 255, 255, 0.03), rgba(255, 255, 255, 0)); } -.nextra-sidebar > div > ul > li:has(> div) > :is(a, button):hover { - border: none; - background: transparent; - color: var(--page-fg-muted); - transform: none; +.docs-code-block-meta { + display: flex; + align-items: center; + gap: 10px; + min-width: 0; + flex-wrap: wrap; } -.nextra-sidebar > div > ul > li:has(> div) > :is(a, button) svg { - display: none; +.docs-code-block-title, +.docs-code-block-language { + display: inline-flex; + align-items: center; + min-height: 28px; + border: 1px solid rgba(255, 255, 255, 0.1); + border-radius: 999px; + font-family: var(--font-mono); + letter-spacing: 0.08em; + font-weight: 600; } -.nextra-sidebar > div > ul > li:has(> div) > div { - height: auto !important; - overflow: visible !important; - opacity: 1 !important; - transition: none !important; +.docs-code-block-title { + max-width: min(100%, 28rem); + padding: 0 12px; + color: rgba(255, 255, 255, 0.9); + background: rgba(255, 255, 255, 0.06); + font-size: 0.74rem; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; } -.nextra-sidebar > div > ul > li:has(> div) > div > ul { - gap: 2px; - padding-left: 0; +.docs-code-block-language { + padding: 0 10px; + color: rgba(148, 163, 184, 0.92); + background: rgba(15, 23, 42, 0.75); + font-size: 0.68rem; + text-transform: uppercase; } -.nextra-sidebar a, -.nextra-toc a { +.docs-code-block-copy { + display: inline-flex; + align-items: center; + justify-content: center; + gap: 8px; + margin-left: auto; min-height: 34px; - border: 1px solid transparent; - border-radius: 10px; - color: var(--page-fg-soft); - font-size: 0.88rem; + padding: 0 12px; + border: 1px solid rgba(255, 255, 255, 0.12); + border-radius: 999px; + background: rgba(255, 255, 255, 0.03); + color: rgba(226, 232, 240, 0.78); + cursor: pointer; transition: + color 0.18s ease, border-color 0.18s ease, background-color 0.18s ease, - color 0.18s ease, transform 0.18s ease; } -.nextra-sidebar a:hover, -.nextra-toc a:hover { - border-color: var(--surface-border); - background: color-mix(in srgb, var(--surface-subtle) 84%, transparent); - color: var(--page-fg); - transform: translateX(1px); -} - -.nextra-sidebar :is(a[aria-current='page'], li.active > a) { - border-color: color-mix(in srgb, var(--surface-border-strong) 88%, transparent); - background: color-mix(in srgb, var(--surface-subtle) 88%, transparent); - color: var(--page-fg); -} - -.nextra-sidebar a[href^='#'], -.nextra-sidebar li.active > ul:has(a[href^='#']) { - display: none; -} - -.nextra-sidebar summary, -.nextra-sidebar [data-has-children='true'] { - font-size: 0.88rem; -} - -.nextra-sidebar :is(h2, h3, h4, [data-headings]) { - letter-spacing: -0.01em; -} - -.nextra-body-typesetting-article { - max-width: 920px; -} - -.nextra-body-typesetting-article { - color: var(--page-fg); - font-size: 0.95rem; - line-height: 1.78; -} - -.nextra-body-typesetting-article :where(h1, h2, h3, h4) { - letter-spacing: -0.035em; - line-height: 1.1; +.docs-code-block-copy:hover { + color: #ffffff; + border-color: rgba(255, 255, 255, 0.24); + background: rgba(255, 255, 255, 0.08); + transform: translateY(-1px); } -.nextra-body-typesetting-article h1 { - font-size: clamp(1.9rem, 3vw, 2.35rem); - margin-bottom: 0.95rem; +.docs-code-block-copy:focus-visible { + outline: 2px solid rgba(255, 255, 255, 0.45); + outline-offset: 2px; } -.nextra-body-typesetting-article h2 { - font-size: clamp(1.34rem, 2.1vw, 1.66rem); - margin-top: 2.15rem; +.docs-code-block-copy svg { + width: 14px; + height: 14px; + flex: 0 0 auto; } -.nextra-body-typesetting-article h3 { - font-size: 1.08rem; - margin-top: 1.6rem; +.docs-code-block-copy span { + line-height: 1; } -.nextra-body-typesetting-article h4 { - font-size: 0.98rem; - margin-top: 1.3rem; +.docs-code-block-content { + overflow-x: auto; } -.nextra-body-typesetting-article :where(p, li, blockquote) { - color: var(--page-fg-soft); +.docs-code-block-pre { + margin: 0 !important; + padding: 18px 20px 22px !important; + font-size: 12.5px !important; + background: transparent !important; + overflow-x: auto; + line-height: 1.65 !important; } -.nextra-body-typesetting-article a:not(.nextra-card) { - color: var(--page-fg); - text-decoration-line: underline; - text-decoration-color: var(--surface-border-strong); - text-decoration-thickness: 0.06em; - text-underline-offset: 0.22em; +.docs-code-block-pre code { + background: transparent !important; } -.nextra-body-typesetting-article a:not(.nextra-card):hover { - text-decoration-color: currentColor; -} +@media (max-width: 640px) { + .docs-code-block-header { + align-items: stretch; + flex-direction: column; + } -.nextra-body-typesetting-article :not(pre) > code { - padding: 0.16rem 0.38rem; - border: 1px solid var(--surface-border); - border-radius: 0.45rem; - background: var(--surface-subtle); - color: var(--page-fg); - font-size: 0.88em; + .docs-code-block-copy { + align-self: flex-end; + } } .nextra-body-typesetting-article pre { - border: 1px solid var(--surface-border); - border-radius: 18px; - background: color-mix(in srgb, var(--surface-muted) 90%, transparent) !important; - box-shadow: none; - font-size: 0.87rem; - line-height: 1.64; -} - -.nextra-body-typesetting-article pre code { - color: inherit; + padding: 16px !important; + font-size: 0.9rem !important; } .nextra-body-typesetting-article blockquote, .nextra-callout { - border: 1px solid var(--surface-border); - border-left: 2px solid var(--surface-border-strong); - border-radius: 0 16px 16px 0; - background: color-mix(in srgb, var(--surface-muted) 90%, transparent); + margin: 32px 0; + padding: 20px 24px; + border-radius: 8px; + border: 1px solid var(--surface-separator); + background: var(--surface-subtle); + border-left: 4px solid var(--accent); } .nextra-body-typesetting-article table { - font-size: 0.92rem; -} - -.nextra-body-typesetting-article :where(th, td) { - border-color: var(--surface-border); -} - -.nextra-body-typesetting-article .nextra-card { - border-color: var(--surface-border); - background: var(--surface); -} - -.nextra-body-typesetting-article .mermaid-diagram { - overflow: hidden; - margin-top: 1.25rem; - border: 1px solid rgba(147, 164, 191, 0.28); - border-radius: 20px; - background: - linear-gradient(180deg, rgba(15, 17, 23, 0.98) 0%, rgba(17, 24, 39, 0.98) 100%), - radial-gradient(circle at top, rgba(111, 142, 207, 0.16), transparent 48%); - box-shadow: - inset 0 1px 0 rgba(255, 255, 255, 0.04), - 0 18px 48px rgba(2, 6, 23, 0.28); -} - -.nextra-body-typesetting-article .mermaid-diagram__title { - padding: 14px 18px 0; - color: #9fb0cc; - font-size: 0.76rem; - font-weight: 600; - letter-spacing: 0.08em; - text-transform: uppercase; -} - -.nextra-body-typesetting-article .mermaid-diagram__canvas { - overflow-x: auto; - padding: 18px; -} - -.nextra-body-typesetting-article .mermaid-diagram__canvas svg { - display: block; - height: auto; - margin: 0 auto; -} - -.nextra-body-typesetting-article .mermaid-diagram__loading, -.nextra-body-typesetting-article .mermaid-diagram__error { - color: #d7e0ef; - font-size: 0.88rem; -} - -.nextra-body-typesetting-article .mermaid-diagram__fallback { - margin: 0; - border: 1px solid rgba(147, 164, 191, 0.18); - border-radius: 16px; - background: rgba(15, 17, 23, 0.82) !important; -} - -.nextra-body-typesetting-article .mermaid-diagram__error { - margin: 0; - padding: 0 18px 18px; -} - -.nextra-body-typesetting-article .docs-widget { - margin: 1.5rem 0; -} - -.nextra-body-typesetting-article .docs-widget-header { - margin-bottom: 0.9rem; -} - -.nextra-body-typesetting-article .docs-widget-header h3 { - margin: 0; - font-size: 1rem; -} - -.nextra-body-typesetting-article .docs-widget-header p { - margin: 0.45rem 0 0; - color: var(--page-fg-soft); -} - -.nextra-body-typesetting-article .docs-table-shell { - overflow-x: auto; - border: 1px solid var(--surface-border); - border-radius: 20px; - background: color-mix(in srgb, var(--surface-strong) 92%, transparent); - box-shadow: var(--shadow-sm); -} - -.nextra-body-typesetting-article .docs-widget-table { - width: 100%; - min-width: 720px; - margin: 0; - border: 0; - border-collapse: separate; - border-spacing: 0; - background: transparent; -} - -.nextra-body-typesetting-article .docs-widget-table thead { - background: color-mix(in srgb, var(--surface-subtle) 88%, transparent); -} - -.nextra-body-typesetting-article .docs-widget-table :where(th, td) { - vertical-align: top; - border-right: 1px solid var(--surface-border); - border-bottom: 1px solid var(--surface-border); -} - -.nextra-body-typesetting-article .docs-widget-table :where(th, td):last-child { - border-right: 0; -} - -.nextra-body-typesetting-article .docs-widget-table tbody tr:last-child td { - border-bottom: 0; -} - -.nextra-body-typesetting-article .docs-cell-heading { - display: flex; - flex-direction: column; - gap: 0.45rem; -} - -.nextra-body-typesetting-article .docs-muted { - color: var(--page-fg-muted); -} - -.nextra-body-typesetting-article .docs-table-list, -.nextra-body-typesetting-article .docs-platform-card__highlights { - margin: 0; - padding-left: 1.1rem; -} - -.nextra-body-typesetting-article .docs-table-list li, -.nextra-body-typesetting-article .docs-platform-card__highlights li { - margin: 0.15rem 0; - color: var(--page-fg-soft); -} - -.nextra-body-typesetting-article .docs-badge { - display: inline-flex; - align-items: center; - justify-content: center; - min-height: 26px; - padding: 0 0.7rem; - border: 1px solid var(--surface-border); - border-radius: 999px; - font-size: 0.72rem; - font-weight: 700; - letter-spacing: 0.05em; - text-transform: uppercase; -} - -.nextra-body-typesetting-article .docs-badge--stable, -.nextra-body-typesetting-article .docs-badge--full { - border-color: rgba(20, 132, 86, 0.22); - background: rgba(20, 132, 86, 0.1); - color: #116149; -} - -.nextra-body-typesetting-article .docs-badge--partial, -.nextra-body-typesetting-article .docs-badge--beta { - border-color: rgba(180, 83, 9, 0.2); - background: rgba(180, 83, 9, 0.1); - color: #92400e; -} - -.nextra-body-typesetting-article .docs-badge--planned, -.nextra-body-typesetting-article .docs-badge--experimental, -.nextra-body-typesetting-article .docs-badge--info { - border-color: rgba(37, 99, 235, 0.18); - background: rgba(37, 99, 235, 0.08); - color: #1d4ed8; -} - -.nextra-body-typesetting-article .docs-badge--deprecated, -.nextra-body-typesetting-article .docs-badge--unsupported { - border-color: rgba(185, 28, 28, 0.18); - background: rgba(185, 28, 28, 0.08); - color: #b91c1c; -} - -.nextra-body-typesetting-article .docs-platform-grid { - display: grid; - grid-template-columns: repeat(3, minmax(0, 1fr)); - gap: 14px; -} - -.nextra-body-typesetting-article .docs-platform-card { - position: relative; - overflow: hidden; - padding: 20px; - border: 1px solid var(--surface-border); - border-radius: 22px; - background: - linear-gradient(180deg, color-mix(in srgb, var(--surface-strong) 94%, transparent), var(--surface)), - var(--hero-glow); - box-shadow: var(--shadow-sm); -} - -.nextra-body-typesetting-article .docs-platform-card::before { - content: ''; - position: absolute; - inset: 0 auto auto 0; width: 100%; - height: 1px; - background: linear-gradient(90deg, transparent, var(--surface-border-strong), transparent); -} - -.nextra-body-typesetting-article .docs-platform-card__top { - display: flex; - align-items: flex-start; - justify-content: space-between; - gap: 14px; -} - -.nextra-body-typesetting-article .docs-platform-card__family { - display: inline-flex; - margin-bottom: 0.5rem; - color: var(--page-fg-muted); - font-size: 0.72rem; - font-weight: 700; - letter-spacing: 0.08em; - text-transform: uppercase; -} - -.nextra-body-typesetting-article .docs-platform-card h3 { - margin: 0; - font-size: 1.02rem; -} - -.nextra-body-typesetting-article .docs-platform-card p { - margin: 0.75rem 0 0; -} - -.nextra-body-typesetting-article .docs-command-stack { - display: flex; - flex-direction: column; - gap: 0.45rem; + border-collapse: collapse; + margin: 32px 0; + font-size: 0.95rem; } -.nextra-body-typesetting-article .docs-command-chip { - display: block; - width: fit-content; - max-width: min(100%, 36rem); - padding: 0.45rem 0.6rem; - border: 1px solid var(--surface-border); - border-radius: 12px; - background: color-mix(in srgb, var(--surface-subtle) 90%, transparent); +.nextra-body-typesetting-article th { + text-align: left; + padding: 12px 16px; + border-bottom: 2px solid var(--surface-separator); color: var(--page-fg); - white-space: pre-wrap; - word-break: break-word; -} - -html.dark .home-topbar-nav a:hover, -html.dark .docs-nav-link:hover, -html.dark .docs-navbar-action:hover { - border-color: var(--surface-border); - background: var(--surface-highlight); -} - -html.dark .home-hero-copy, -html.dark .home-link-card, -html.dark .capability-card, -html.dark .reading-path-card, -html.dark .not-found-shell, -html.dark .proof-pill { - border-color: var(--surface-border); - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-overlay) 92%, transparent), - color-mix(in srgb, var(--surface) 94%, transparent) - ); - box-shadow: - inset 0 1px 0 var(--surface-highlight), - var(--shadow-sm); -} - -html.dark .home-hero-copy { - box-shadow: - inset 0 1px 0 var(--surface-highlight), - var(--shadow-md); -} - -html.dark .home-hero-copy::before, -html.dark .home-link-card::before, -html.dark .capability-card::before, -html.dark .reading-path-card::before { - background: linear-gradient(90deg, transparent, var(--surface-separator), transparent); -} - -html.dark .proof-pill { - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-elevated) 82%, transparent), - color-mix(in srgb, var(--surface-overlay) 88%, transparent) - ); -} - -html.dark .home-link-card:hover, -html.dark .capability-card:hover, -html.dark .reading-path-card:hover { - border-color: var(--surface-border); - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-elevated) 88%, transparent), - color-mix(in srgb, var(--surface-overlay) 96%, transparent) - ); - box-shadow: - inset 0 1px 0 var(--surface-highlight-strong), - 0 22px 54px rgba(0, 0, 0, 0.2); -} - -html.dark .hero-button { - border-color: var(--surface-border); - box-shadow: inset 0 1px 0 var(--surface-highlight); -} - -html.dark .hero-button-primary { - border-color: rgba(255, 255, 255, 0.08); - background: color-mix(in srgb, var(--button-primary-bg) 95%, transparent); - color: var(--button-primary-fg); - box-shadow: - inset 0 1px 0 rgba(255, 255, 255, 0.18), - 0 16px 40px rgba(0, 0, 0, 0.18); -} - -html.dark .hero-button-primary:hover { - box-shadow: - inset 0 1px 0 rgba(255, 255, 255, 0.22), - 0 18px 42px rgba(0, 0, 0, 0.22); -} - -html.dark .hero-button-secondary { - border-color: var(--surface-border); - background: var(--button-secondary-bg); - color: var(--button-secondary-fg); -} - -html.dark .hero-button-secondary:hover { - background: color-mix(in srgb, var(--surface-overlay) 72%, var(--surface-highlight)); -} - -html.dark .nextra-nav-container-blur, -html.dark .nextra-navbar-blur { - background: color-mix(in srgb, var(--page-bg) 78%, var(--surface-overlay)); - border-color: var(--surface-separator); -} - -html.dark .docs-brand-badge { - border-color: var(--surface-border); - background: color-mix(in srgb, var(--surface-highlight) 100%, var(--surface-overlay)); -} - -html.dark .docs-navbar-action { - border-color: var(--surface-border); - background: color-mix(in srgb, var(--surface-highlight) 100%, var(--surface-overlay)); -} - -html.dark .nextra-sidebar, -html.dark .nextra-toc { - border-color: var(--surface-separator); - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-strong) 94%, transparent), - color-mix(in srgb, var(--surface-overlay) 96%, transparent) - ); - box-shadow: inset 0 1px 0 var(--surface-highlight); -} - -html.dark .nextra-sidebar { - border-right-color: var(--surface-separator); -} - -html.dark .nextra-footer { - border-color: var(--surface-separator); - color: var(--page-fg-muted); -} - -html.dark .nextra-sidebar a, -html.dark .nextra-toc a { - border-color: transparent; -} - -html.dark .nextra-sidebar a:hover, -html.dark .nextra-toc a:hover { - border-color: var(--surface-border); - background: color-mix(in srgb, var(--surface-highlight-strong) 100%, var(--surface-overlay)); - transform: translateX(0); -} - -html.dark .nextra-sidebar :is(a[aria-current='page'], li.active > a), -html.dark .nextra-toc a[data-active='true'] { - border-color: var(--surface-border); - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-highlight-strong) 100%, var(--surface-elevated)), - color-mix(in srgb, var(--surface-highlight) 100%, var(--surface-overlay)) - ); - box-shadow: inset 0 1px 0 var(--surface-highlight); -} - -html.dark .nextra-body-typesetting-article a:not(.nextra-card) { - text-decoration-color: var(--surface-separator); -} - -html.dark .nextra-body-typesetting-article hr { - border-color: var(--surface-separator); - opacity: 0.72; -} - -html.dark .nextra-body-typesetting-article :not(pre) > code { - border-color: var(--surface-border); - background: var(--inline-code-bg); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.024); -} - -html.dark .nextra-body-typesetting-article pre { - border-color: var(--surface-border); - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-muted) 94%, transparent), - color-mix(in srgb, var(--surface-elevated) 92%, transparent) - ) !important; - box-shadow: - inset 0 1px 0 var(--surface-highlight), - 0 18px 44px rgba(0, 0, 0, 0.14); -} - -html.dark .nextra-body-typesetting-article blockquote, -html.dark .nextra-callout { - border-color: var(--surface-border); - border-left-color: var(--surface-separator); - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-highlight) 100%, var(--surface-muted)), - color-mix(in srgb, var(--surface-overlay) 92%, transparent) - ); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.02); -} - -html.dark .nextra-body-typesetting-article table { - overflow: hidden; - border: 1px solid var(--surface-border); - border-collapse: separate; - border-spacing: 0; - border-radius: 18px; - background: color-mix(in srgb, var(--surface-overlay) 96%, transparent); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.02); -} - -html.dark .nextra-body-typesetting-article thead { - background: color-mix(in srgb, var(--surface-highlight-strong) 100%, var(--surface-elevated)); -} - -html.dark .nextra-body-typesetting-article tbody tr { - background: color-mix(in srgb, var(--surface-overlay) 94%, transparent); -} - -html.dark .nextra-body-typesetting-article tbody tr:nth-child(even) { - background: color-mix(in srgb, var(--surface-muted) 82%, transparent); -} - -html.dark .nextra-body-typesetting-article tbody tr:hover { - background: color-mix(in srgb, var(--surface-highlight) 100%, var(--surface-elevated)); -} - -html.dark .nextra-body-typesetting-article :where(th, td) { - border-color: var(--surface-separator); -} - -html.dark .nextra-body-typesetting-article .nextra-card { - border-color: var(--surface-border); - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-overlay) 94%, transparent), - color-mix(in srgb, var(--surface) 92%, transparent) - ); - box-shadow: - inset 0 1px 0 var(--surface-highlight), - var(--shadow-sm); -} - -html.dark .nextra-body-typesetting-article .mermaid-diagram { - border-color: var(--surface-border); - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-muted) 96%, transparent), - color-mix(in srgb, var(--surface-elevated) 92%, transparent) - ); - box-shadow: - inset 0 1px 0 var(--surface-highlight), - 0 18px 44px rgba(0, 0, 0, 0.16); -} - -html.dark .nextra-body-typesetting-article .mermaid-diagram__title { - color: var(--page-fg-muted); + font-weight: 600; } -html.dark .nextra-body-typesetting-article .mermaid-diagram__loading, -html.dark .nextra-body-typesetting-article .mermaid-diagram__error { +.nextra-body-typesetting-article td { + padding: 12px 16px; + border-bottom: 1px solid var(--surface-separator); color: var(--page-fg-soft); } -html.dark .nextra-body-typesetting-article .mermaid-diagram__fallback { - border-color: var(--surface-border); - background: color-mix(in srgb, var(--surface-overlay) 96%, transparent) !important; -} - -html.dark .nextra-body-typesetting-article .docs-table-shell { - border-color: var(--surface-border); - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-overlay) 96%, transparent), - color-mix(in srgb, var(--surface) 92%, transparent) - ); - box-shadow: - inset 0 1px 0 var(--surface-highlight), - var(--shadow-sm); -} - -html.dark .nextra-body-typesetting-article .docs-widget-table thead { - background: color-mix(in srgb, var(--surface-highlight-strong) 100%, var(--surface-elevated)); -} - -html.dark .nextra-body-typesetting-article .docs-widget-table :where(th, td) { - border-color: var(--surface-separator); -} - -html.dark .nextra-body-typesetting-article .docs-badge--stable, -html.dark .nextra-body-typesetting-article .docs-badge--full { - border-color: rgba(74, 222, 128, 0.18); - background: rgba(74, 222, 128, 0.1); - color: #86efac; -} - -html.dark .nextra-body-typesetting-article .docs-badge--partial, -html.dark .nextra-body-typesetting-article .docs-badge--beta { - border-color: rgba(251, 191, 36, 0.16); - background: rgba(251, 191, 36, 0.1); - color: #fcd34d; -} - -html.dark .nextra-body-typesetting-article .docs-badge--planned, -html.dark .nextra-body-typesetting-article .docs-badge--experimental, -html.dark .nextra-body-typesetting-article .docs-badge--info { - border-color: rgba(96, 165, 250, 0.16); - background: rgba(96, 165, 250, 0.1); - color: #93c5fd; -} - -html.dark .nextra-body-typesetting-article .docs-badge--deprecated, -html.dark .nextra-body-typesetting-article .docs-badge--unsupported { - border-color: rgba(248, 113, 113, 0.16); - background: rgba(248, 113, 113, 0.1); - color: #fca5a5; -} - -html.dark .nextra-body-typesetting-article .docs-platform-card { - border-color: var(--surface-border); - background: - linear-gradient( - 180deg, - color-mix(in srgb, var(--surface-overlay) 94%, transparent), - color-mix(in srgb, var(--surface) 92%, transparent) - ); - box-shadow: - inset 0 1px 0 var(--surface-highlight), - var(--shadow-sm); -} - -html.dark .nextra-body-typesetting-article .docs-platform-card::before { - background: linear-gradient(90deg, transparent, var(--surface-separator), transparent); -} - -html.dark .nextra-body-typesetting-article .docs-command-chip { - border-color: var(--surface-border); - background: color-mix(in srgb, var(--surface-highlight) 100%, var(--surface-elevated)); - box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.03); -} - -.nextra-footer { - color: var(--page-fg-muted); -} - -.motion-rise { - animation: rise-in 0.56s cubic-bezier(0.16, 1, 0.3, 1) both; -} - -.motion-stagger > * { - animation: rise-in 0.56s cubic-bezier(0.16, 1, 0.3, 1) both; -} - -.motion-stagger > :nth-child(1) { - animation-delay: 0.04s; -} - -.motion-stagger > :nth-child(2) { - animation-delay: 0.1s; -} - -.motion-stagger > :nth-child(3) { - animation-delay: 0.16s; -} - -.motion-stagger > :nth-child(4) { - animation-delay: 0.22s; -} - -@keyframes rise-in { - from { - opacity: 0; - transform: translateY(16px); - } - - to { - opacity: 1; - transform: translateY(0); - } -} - -@media (max-width: 1080px) { - .reading-path-grid { - grid-template-columns: repeat(2, minmax(0, 1fr)); - } +.docs-badge { + padding: 2px 8px; + border-radius: 4px; + font-size: 0.75rem; + font-weight: 600; + text-transform: uppercase; } -@media (max-width: 960px) { - .docs-home, - .not-found-shell { - width: min(100% - 24px, 1080px); - } - - .home-topbar { - flex-direction: column; - align-items: flex-start; - } - - .home-hero-copy { - padding: 28px 24px 24px; - } +.docs-badge--stable { background: rgba(16, 185, 129, 0.1); color: #10b981; } +.docs-badge--beta { background: rgba(245, 158, 11, 0.1); color: #f59e0b; } +.docs-badge--experimental { background: rgba(139, 92, 246, 0.1); color: #8b5cf6; } +@media (max-width: 768px) { .home-proof-strip, .home-link-grid, .capability-grid, .reading-path-grid { - grid-template-columns: minmax(0, 1fr); - } - - .nextra-body-typesetting-article .docs-platform-grid { grid-template-columns: 1fr; } - + .home-hero h1 { - max-width: 13ch; - font-size: clamp(1.8rem, 10vw, 2.45rem); + font-size: 2.5rem; } - - .docs-site-navbar nav:not(.docs-navbar-links) { - gap: 12px; - } -} - -@media (max-width: 768px) { - .docs-brand-badge, - .docs-navbar-shell { + + .docs-navbar-actions { display: none; } } - -@media (prefers-reduced-motion: reduce) { - html { - scroll-behavior: auto; - } - - *, - *::before, - *::after { - animation-duration: 0.01ms !important; - animation-iteration-count: 1 !important; - transition-duration: 0.01ms !important; - scroll-behavior: auto !important; - } -} diff --git a/doc/app/home-page.mdx b/doc/app/home-page.mdx index 45f80343..3ad74725 100644 --- a/doc/app/home-page.mdx +++ b/doc/app/home-page.mdx @@ -15,7 +15,7 @@ import {