diff --git a/.githooks/sync-versions.ts b/.githooks/sync-versions.ts index ee57cc07..5fa9e63e 100644 --- a/.githooks/sync-versions.ts +++ b/.githooks/sync-versions.ts @@ -118,7 +118,24 @@ for (const pkg of [...packages, ...libraryPackages, ...packagesPackages, ...cliN } } -// Sync Cargo.toml version +// Sync root workspace Cargo.toml version +const workspaceCargoTomlPath = resolve('Cargo.toml') +try { + const cargoContent = readFileSync(workspaceCargoTomlPath, 'utf-8') + const cargoUpdated = cargoContent.replace( + /(\[workspace\.package\][\s\S]*?^version = ")([^"]+)(")/m, + `$1${rootVersion}$3`, + ) + if (cargoContent !== cargoUpdated) { + writeFileSync(workspaceCargoTomlPath, cargoUpdated, 'utf-8') + console.log(` ✓ workspace Cargo.toml: version → ${rootVersion}`) + changed = true + } +} catch { + console.log('⚠️ Cargo.toml not found, skipping') +} + +// Sync GUI Cargo.toml version const cargoTomlPath = resolve('gui/src-tauri/Cargo.toml') try { const cargoContent = readFileSync(cargoTomlPath, 'utf-8') @@ -172,6 +189,8 @@ if (changed) { console.log('\n📦 Versions synced, auto-staging changes...') try { const filesToStage = [ + 'package.json', + 'Cargo.toml', 'cli/package.json', 'gui/package.json', 'doc/package.json', @@ -181,7 +200,7 @@ if (changed) { ...libraryPackages.map(p => p.path), ...packagesPackages.map(p => p.path), ...cliNpmPackages.map(p => p.path), - ] + ].filter(path => existsSync(resolve(path))) execSync( `git add ${filesToStage.join(' ')}`, { stdio: 'inherit' } diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..b0c922ad --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,10 @@ +# Default owner for repository changes +* @TrueNine + +# High-risk directories +/.github/ @TrueNine +/cli/ @TrueNine +/libraries/ @TrueNine +/gui/ @TrueNine +/doc/ @TrueNine +/scripts/ @TrueNine diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 7be93e8b..03407006 100644 --- 
a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -21,6 +21,8 @@ body: options: - label: I have searched for duplicate or closed issues required: true + - label: I redacted secrets, tokens, and private paths from logs or config snippets + required: true - type: input id: os attributes: @@ -37,6 +39,14 @@ body: placeholder: "e.g., 2026.10125.0" validations: required: true + - type: input + id: first-bad-version + attributes: + label: First version where this appeared + description: If known, tell us the first version where you noticed the problem + placeholder: "e.g., worked in 2026.10222.0, broken in 2026.10303.11117" + validations: + required: false - type: dropdown id: context attributes: @@ -47,6 +57,20 @@ body: - Both / unsure validations: required: true + - type: dropdown + id: affected-module + attributes: + label: Affected module + description: Pick the closest area so maintainers can route triage faster + options: + - Config / sync core + - Plugin or adapter pipeline + - Native module / N-API binding + - CLI packaging / release artifact + - GUI shell / updater + - Docs / unclear + validations: + required: true - type: input id: plugin-context attributes: @@ -87,7 +111,7 @@ body: id: additional-context attributes: label: Additional Context - description: Logs, config, screenshots, workarounds - placeholder: "Error messages, .tnmsc.json (redact secrets), or environment details." + description: Logs, config, screenshots, workarounds, and whether config snippets were redacted + placeholder: "Error messages, .tnmsc.json (with secrets removed), screenshots, or environment details." 
validations: required: false diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 8005e322..8da77524 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,2 +1,8 @@ blank_issues_enabled: false -contact_links: [] +contact_links: + - name: Documentation and setup guide + url: https://github.com/TrueNine/memory-sync/tree/main/doc + about: Read the docs and setup references before opening a support issue. + - name: Security policy + url: https://github.com/TrueNine/memory-sync/security/policy + about: Report vulnerabilities through the security policy instead of opening a public issue. diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml index 1b5d940e..9ec0d26d 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -8,6 +8,26 @@ body: Before submitting, please [search and confirm](https://github.com/TrueNine/memory-sync/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-desc) that the idea does not yet exist. Duplicate requests may be closed. --- + - type: checkboxes + id: confirm + attributes: + label: Before opening, please confirm + options: + - label: I searched for an existing request or discussion first + required: true + - type: dropdown + id: area + attributes: + label: Affected area + description: Choose the closest part of the product + options: + - CLI (tnmsc) + - Shared Rust / N-API libraries + - GUI (Tauri app) + - Docs or onboarding + - Multiple / unsure + validations: + required: true - type: textarea id: feature-description attributes: @@ -24,6 +44,14 @@ body: placeholder: "Concrete examples: which target (Cursor/Kiro/Warp/…), which input source, typical workflow." 
validations: required: true + - type: textarea + id: current-workaround + attributes: + label: Current workaround + description: If you already have a manual workaround, describe it + placeholder: "Explain how you are handling this today, and what is still painful." + validations: + required: false - type: textarea id: additional-context attributes: diff --git a/.github/actions/setup-tauri/action.yml b/.github/actions/setup-tauri/action.yml index ca600d1c..ef577d15 100644 --- a/.github/actions/setup-tauri/action.yml +++ b/.github/actions/setup-tauri/action.yml @@ -13,6 +13,15 @@ inputs: runs: using: composite steps: + - name: Cache Linux package archives + if: runner.os == 'Linux' + uses: actions/cache@v4 + with: + path: /var/cache/apt/archives + key: ${{ runner.os }}-tauri-apt-${{ hashFiles('.github/actions/setup-tauri/action.yml') }} + restore-keys: | + ${{ runner.os }}-tauri-apt- + - name: Install Linux dependencies if: runner.os == 'Linux' shell: bash @@ -46,7 +55,7 @@ runs: path: | ~/.cargo/registry ~/.cargo/git - gui/src-tauri/target + target key: ${{ runner.os }}-cargo-${{ steps.cargo-deps-hash.outputs.hash }} restore-keys: | ${{ runner.os }}-cargo- @@ -67,6 +76,6 @@ runs: - name: Clean old bundle artifacts shell: bash run: | - if [ -d "gui/src-tauri/target" ]; then - find gui/src-tauri/target -type d -name bundle -prune -exec rm -rf {} + + if [ -d "target" ]; then + find target -type d -name bundle -prune -exec rm -rf {} + fi diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..8a00c658 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,145 @@ +version: 2 +updates: + - package-ecosystem: github-actions + directory: / + schedule: + interval: weekly + day: monday + time: '09:00' + timezone: Asia/Shanghai + open-pull-requests-limit: 5 + labels: + - dependencies + - github-actions + groups: + github-actions: + patterns: + - '*' + + - package-ecosystem: cargo + directory: / + schedule: + interval: weekly + day: 
monday + time: '09:00' + timezone: Asia/Shanghai + open-pull-requests-limit: 5 + labels: + - dependencies + - rust + groups: + cargo-workspace: + patterns: + - '*' + + - package-ecosystem: npm + directory: / + schedule: + interval: weekly + day: monday + time: '09:00' + timezone: Asia/Shanghai + open-pull-requests-limit: 5 + labels: + - dependencies + - javascript + groups: + npm-root: + patterns: + - '*' + + - package-ecosystem: npm + directory: /cli + schedule: + interval: weekly + day: monday + time: '09:00' + timezone: Asia/Shanghai + open-pull-requests-limit: 5 + labels: + - dependencies + - javascript + groups: + npm-cli: + patterns: + - '*' + + - package-ecosystem: npm + directory: /gui + schedule: + interval: weekly + day: monday + time: '09:00' + timezone: Asia/Shanghai + open-pull-requests-limit: 5 + labels: + - dependencies + - javascript + groups: + npm-gui: + patterns: + - '*' + + - package-ecosystem: npm + directory: /doc + schedule: + interval: weekly + day: monday + time: '09:00' + timezone: Asia/Shanghai + open-pull-requests-limit: 5 + labels: + - dependencies + - javascript + groups: + npm-doc: + patterns: + - '*' + + - package-ecosystem: npm + directory: /libraries/logger + schedule: + interval: weekly + day: monday + time: '09:00' + timezone: Asia/Shanghai + open-pull-requests-limit: 5 + labels: + - dependencies + - javascript + groups: + npm-logger: + patterns: + - '*' + + - package-ecosystem: npm + directory: /libraries/md-compiler + schedule: + interval: weekly + day: monday + time: '09:00' + timezone: Asia/Shanghai + open-pull-requests-limit: 5 + labels: + - dependencies + - javascript + groups: + npm-md-compiler: + patterns: + - '*' + + - package-ecosystem: npm + directory: /libraries/script-runtime + schedule: + interval: weekly + day: monday + time: '09:00' + timezone: Asia/Shanghai + open-pull-requests-limit: 5 + labels: + - dependencies + - javascript + groups: + npm-script-runtime: + patterns: + - '*' diff --git 
a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 00000000..dfd9d304 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,38 @@ +## Summary + +- Describe the change in one or two sentences. + +## Scope + +- [ ] CLI (`cli/`) +- [ ] Shared Rust / N-API libraries (`libraries/`) +- [ ] GUI (`gui/`) +- [ ] Docs (`doc/`) +- [ ] GitHub automation / release chain (`.github/`) + +## Release Impact + +- [ ] No release or version impact +- [ ] Affects npm package contents or NAPI publishing +- [ ] Affects GUI artifacts or GitHub Release flow +- [ ] Affects config schema, CLI flags, or user-facing behavior + +Details: + +## Testing + +- [ ] Not run locally +- [ ] Local checks passed +- [ ] Workflow or YAML validation passed + +Commands / evidence: + +## Risks and Follow-ups + +- List rollout risks, migrations, or follow-up tasks if any. + +## Checklist + +- [ ] I reviewed version and release implications +- [ ] I called out any `.github/` or workflow changes above +- [ ] I documented remaining follow-up work, if any diff --git a/.github/workflows/build-gui-all.yml b/.github/workflows/build-gui-all.yml index 6a31a4e4..0ffbe598 100644 --- a/.github/workflows/build-gui-all.yml +++ b/.github/workflows/build-gui-all.yml @@ -50,8 +50,10 @@ jobs: with: name: gui-${{ matrix.os }} path: | - gui/src-tauri/target/*/release/bundle/**/*.exe - gui/src-tauri/target/release/bundle/**/*.exe + target/*/release/bundle/**/*.exe + target/release/bundle/**/*.exe + target/*/release/bundle/**/*.sig + target/release/bundle/**/*.sig if-no-files-found: error - name: Upload Linux artifacts @@ -60,12 +62,14 @@ jobs: with: name: gui-${{ matrix.os }} path: | - gui/src-tauri/target/*/release/bundle/**/*.AppImage - gui/src-tauri/target/*/release/bundle/**/*.deb - gui/src-tauri/target/*/release/bundle/**/*.rpm - gui/src-tauri/target/release/bundle/**/*.AppImage - gui/src-tauri/target/release/bundle/**/*.deb - gui/src-tauri/target/release/bundle/**/*.rpm 
+ target/*/release/bundle/**/*.AppImage + target/*/release/bundle/**/*.deb + target/*/release/bundle/**/*.rpm + target/*/release/bundle/**/*.sig + target/release/bundle/**/*.AppImage + target/release/bundle/**/*.deb + target/release/bundle/**/*.rpm + target/release/bundle/**/*.sig if-no-files-found: error - name: Upload macOS artifacts @@ -74,6 +78,10 @@ jobs: with: name: gui-${{ matrix.os }} path: | - gui/src-tauri/target/*/release/bundle/**/*.dmg - gui/src-tauri/target/release/bundle/**/*.dmg + target/*/release/bundle/**/*.dmg + target/*/release/bundle/**/*.tar.gz + target/*/release/bundle/**/*.sig + target/release/bundle/**/*.dmg + target/release/bundle/**/*.tar.gz + target/release/bundle/**/*.sig if-no-files-found: error diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml deleted file mode 100644 index 23a4c6de..00000000 --- a/.github/workflows/deploy-docs.yml +++ /dev/null @@ -1,12 +0,0 @@ -name: Deploy Docs - -# TODO: Vercel deployment for doc/ Next.js site - -on: - workflow_dispatch: - -jobs: - deploy: - runs-on: ubuntu-24.04 - steps: - - run: echo "TODO" diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index da0b3599..cc9cd7b8 100644 --- a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -5,6 +5,18 @@ on: branches: - main types: [opened, synchronize, reopened, ready_for_review] + paths-ignore: + - .attachments/** + - AGENTS.md + - CLAUDE.md + - GEMINI.md + - README.md + - CODE_OF_CONDUCT.md + - LICENSE + - SECURITY.md + +permissions: + contents: read concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number }} @@ -12,7 +24,9 @@ concurrency: jobs: check: + if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 + timeout-minutes: 45 steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/release-cli.yml b/.github/workflows/release-cli.yml index fbb15137..7f49ea1c 100644 --- a/.github/workflows/release-cli.yml +++ 
b/.github/workflows/release-cli.yml @@ -1,5 +1,18 @@ name: Release CLI +env: + NODE_VERSION: '25' + CLI_NATIVE_MODULE_DIRS: | + libraries/logger + libraries/md-compiler + libraries/script-runtime + cli + CLI_NATIVE_BINDING_PREFIXES: | + napi-logger. + napi-md-compiler. + napi-script-runtime. + napi-memory-sync-cli. + concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: false @@ -8,15 +21,31 @@ on: push: branches: - main + paths: + - .github/actions/** + - .github/workflows/build-gui-all.yml + - .github/workflows/release-*.yml + - assets/** + - cli/** + - gui/** + - libraries/** + - scripts/** + - Cargo.toml + - Cargo.lock + - package.json + - pnpm-lock.yaml + - pnpm-workspace.yaml + - turbo.json workflow_dispatch: permissions: - contents: write + contents: read jobs: # 1. 版本检查(快速,决定是否继续) check-version: runs-on: ubuntu-24.04 + timeout-minutes: 10 outputs: publish: ${{ steps.check.outputs.publish }} version: ${{ steps.check.outputs.version }} @@ -24,7 +53,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: 25 + node-version: ${{ env.NODE_VERSION }} - name: Check if should publish id: check @@ -44,6 +73,7 @@ jobs: # 1.5. GUI 版本检查(独立于 npm,检查 GitHub Release) check-gui-version: runs-on: ubuntu-24.04 + timeout-minutes: 10 outputs: should_release: ${{ steps.check.outputs.should_release }} version: ${{ steps.check.outputs.version }} @@ -72,6 +102,7 @@ jobs: build-napi: needs: check-version if: needs.check-version.outputs.publish == 'true' + timeout-minutes: 45 strategy: fail-fast: false matrix: @@ -109,29 +140,50 @@ jobs: - name: Build all napi native modules shell: bash run: | - module_dirs=(libraries/logger libraries/md-compiler cli) - for module_dir in "${module_dirs[@]}"; do + while IFS= read -r module_dir; do + if [ -z "$module_dir" ]; then + continue + fi echo "Building napi in ${module_dir}..." 
( cd "${module_dir}" && \ pnpm exec napi build --platform --release --target ${{ matrix.target.rust }} --output-dir dist -- --features napi ) - done + done <<< "$CLI_NATIVE_MODULE_DIRS" - name: Collect .node files into CLI platform package shell: bash run: | target_dir="cli/npm/${{ matrix.target.suffix }}" mkdir -p "$target_dir" shopt -s nullglob - module_dirs=(libraries/logger libraries/md-compiler cli) - for module_dir in "${module_dirs[@]}"; do + while IFS= read -r module_dir; do + if [ -z "$module_dir" ]; then + continue + fi node_files=("${module_dir}"/dist/*.node) if [ "${#node_files[@]}" -eq 0 ]; then echo "ERROR: no .node files found in ${module_dir}/dist" exit 1 fi cp "${node_files[@]}" "$target_dir/" - done + done <<< "$CLI_NATIVE_MODULE_DIRS" + expected_count=0 + while IFS= read -r binding_prefix; do + if [ -z "$binding_prefix" ]; then + continue + fi + expected_count=$((expected_count + 1)) + matches=("$target_dir"/${binding_prefix}*.node) + if [ "${#matches[@]}" -eq 0 ]; then + echo "ERROR: missing binding with prefix ${binding_prefix} in ${target_dir}" + exit 1 + fi + done <<< "$CLI_NATIVE_BINDING_PREFIXES" + actual_count=$(find "$target_dir" -maxdepth 1 -type f -name '*.node' | wc -l | tr -d ' ') + if [ "$actual_count" -ne "$expected_count" ]; then + echo "ERROR: expected ${expected_count} .node files in ${target_dir}, found ${actual_count}" + exit 1 + fi echo "Contents of $target_dir:" ls -la "$target_dir/" - name: Upload CLI platform package @@ -146,6 +198,7 @@ jobs: needs: [check-version, build-napi] if: needs.check-version.outputs.publish == 'true' runs-on: ubuntu-24.04 + timeout-minutes: 20 steps: - uses: actions/checkout@v4 - uses: ./.github/actions/setup-node-pnpm @@ -154,7 +207,7 @@ jobs: - name: Setup npm registry uses: actions/setup-node@v4 with: - node-version: 25 + node-version: ${{ env.NODE_VERSION }} registry-url: https://registry.npmjs.org/ - name: Download all platform artifacts uses: actions/download-artifact@v4 @@ -164,6 +217,7 @@ 
jobs: - name: Distribute artifacts to cli/npm/ directories shell: bash run: | + shopt -s nullglob for artifact_dir in artifacts/cli-napi-*/; do suffix=$(basename "$artifact_dir" | sed 's/cli-napi-//') target_dir="cli/npm/${suffix}" @@ -171,6 +225,37 @@ jobs: echo "Copying from ${artifact_dir} to ${target_dir}" cp "${artifact_dir}"*.node "$target_dir/" || { echo "ERROR: no .node files found in ${artifact_dir}"; exit 1; } done + - name: Validate CLI platform packages + shell: bash + run: | + shopt -s nullglob + expected_count=0 + while IFS= read -r binding_prefix; do + if [ -z "$binding_prefix" ]; then + continue + fi + expected_count=$((expected_count + 1)) + done <<< "$CLI_NATIVE_BINDING_PREFIXES" + for target_dir in cli/npm/*/; do + if [ ! -f "${target_dir}package.json" ]; then + continue + fi + actual_count=$(find "${target_dir}" -maxdepth 1 -type f -name '*.node' | wc -l | tr -d ' ') + if [ "$actual_count" -ne "$expected_count" ]; then + echo "ERROR: expected ${expected_count} .node files in ${target_dir}, found ${actual_count}" + exit 1 + fi + while IFS= read -r binding_prefix; do + if [ -z "$binding_prefix" ]; then + continue + fi + matches=("${target_dir}"${binding_prefix}*.node) + if [ "${#matches[@]}" -eq 0 ]; then + echo "ERROR: missing binding with prefix ${binding_prefix} in ${target_dir}" + exit 1 + fi + done <<< "$CLI_NATIVE_BINDING_PREFIXES" + done - name: Publish CLI platform sub-packages shell: bash env: @@ -188,15 +273,14 @@ jobs: needs: [check-version, publish-napi] if: needs.check-version.outputs.publish == 'true' runs-on: ubuntu-24.04 + timeout-minutes: 20 steps: - uses: actions/checkout@v4 - with: - token: ${{ secrets.GH_PAT }} - uses: ./.github/actions/setup-node-pnpm - name: Setup npm registry uses: actions/setup-node@v4 with: - node-version: 25 + node-version: ${{ env.NODE_VERSION }} registry-url: https://registry.npmjs.org/ - name: Build run: pnpm -F @truenine/memory-sync-cli run build @@ -210,6 +294,7 @@ jobs: build-binary: needs: 
[check-version, publish-napi] if: needs.check-version.outputs.publish == 'true' + timeout-minutes: 60 strategy: fail-fast: false matrix: @@ -296,6 +381,8 @@ jobs: release-gui-collect: needs: [check-gui-version, build-gui-all] if: needs.check-gui-version.outputs.should_release == 'true' + permissions: + contents: write uses: ./.github/workflows/release-gui-collect.yml with: version: ${{ needs.check-gui-version.outputs.version }} diff --git a/.github/workflows/release-gui-collect.yml b/.github/workflows/release-gui-collect.yml index fe81446a..09e3afa2 100644 --- a/.github/workflows/release-gui-collect.yml +++ b/.github/workflows/release-gui-collect.yml @@ -1,4 +1,4 @@ -name: Release GUI Collect +name: Collect GUI Release on: workflow_call: @@ -6,12 +6,6 @@ on: version: required: true type: string - workflow_dispatch: - inputs: - version: - description: 'Version to release (without v prefix, e.g. 2026.10213.0)' - required: true - type: string permissions: contents: write @@ -19,6 +13,7 @@ permissions: jobs: publish-release: runs-on: ubuntu-24.04 + timeout-minutes: 15 steps: - uses: actions/checkout@v4 @@ -27,22 +22,50 @@ jobs: with: path: artifacts pattern: gui-* + merge-multiple: true - name: Clean up unnecessary macOS artifacts run: | find artifacts -name '*.icns' -delete find artifacts -name 'Info.plist' -delete + - name: Verify release artifacts + shell: bash + run: | + installer_count=$(find artifacts -type f \( -name '*.dmg' -o -name '*.exe' -o -name '*.msi' -o -name '*.AppImage' -o -name '*.deb' -o -name '*.rpm' \) | wc -l | tr -d ' ') + updater_count=$(find artifacts -type f \( -name '*.sig' -o -name '*.tar.gz' -o -name '*.zip' \) | wc -l | tr -d ' ') + if [ "$installer_count" -eq 0 ]; then + echo "ERROR: no GUI installer artifacts were downloaded" + exit 1 + fi + if [ "$updater_count" -eq 0 ]; then + echo "ERROR: no GUI updater artifacts were downloaded" + exit 1 + fi + - name: Publish Release uses: softprops/action-gh-release@v2.5.0 with: tag_name: v${{ 
inputs.version }} name: v${{ inputs.version }} files: | + artifacts/*.dmg artifacts/**/*.dmg + artifacts/*.exe artifacts/**/*.exe + artifacts/*.msi + artifacts/**/*.msi + artifacts/*.AppImage artifacts/**/*.AppImage + artifacts/*.deb artifacts/**/*.deb + artifacts/*.rpm artifacts/**/*.rpm + artifacts/*.sig + artifacts/**/*.sig + artifacts/*.tar.gz + artifacts/**/*.tar.gz + artifacts/*.zip + artifacts/**/*.zip env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release-gui-linux.yml b/.github/workflows/release-gui-linux.yml index b87da61c..75747bab 100644 --- a/.github/workflows/release-gui-linux.yml +++ b/.github/workflows/release-gui-linux.yml @@ -1,15 +1,10 @@ -name: Release GUI Linux +name: Debug GUI Linux Artifact on: - workflow_call: - inputs: - version: - required: true - type: string workflow_dispatch: inputs: version: - description: 'Version to release (without v prefix, e.g. 2026.10213.0)' + description: 'Version to rebuild for debugging (without v prefix, e.g. 
2026.10213.0)' required: true type: string @@ -17,24 +12,13 @@ permissions: contents: read jobs: - build-gui-linux: + build-gui-linux-debug: runs-on: ubuntu-24.04 + timeout-minutes: 45 steps: - uses: actions/checkout@v4 - uses: ./.github/actions/setup-node-pnpm - - name: Cache apt packages - uses: actions/cache@v4 - with: - path: /var/cache/apt/archives - key: apt-gtk-${{ runner.os }}-${{ hashFiles('.github/workflows/release-gui-linux.yml') }} - restore-keys: apt-gtk-${{ runner.os }}- - - - name: Install GTK development dependencies - run: | - sudo apt-get update - sudo apt-get install -y --no-install-recommends libgtk-3-dev libglib2.0-dev pkg-config libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf - - uses: ./.github/actions/setup-tauri with: version: ${{ inputs.version }} @@ -50,17 +34,19 @@ jobs: shell: bash run: | echo "=== Finding all bundle artifacts ===" - find gui/src-tauri/target -path '*/bundle/*' -type f \( -name '*.AppImage' -o -name '*.deb' -o -name '*.rpm' \) 2>/dev/null || echo 'No bundle files found' + find target -path '*/bundle/*' -type f \( -name '*.AppImage' -o -name '*.deb' -o -name '*.rpm' -o -name '*.sig' \) 2>/dev/null || echo 'No bundle files found' - name: Upload artifacts uses: actions/upload-artifact@v4 with: - name: gui-ubuntu-24.04 + name: debug-gui-ubuntu-24.04 path: | - gui/src-tauri/target/*/release/bundle/**/*.AppImage - gui/src-tauri/target/release/bundle/**/*.AppImage - gui/src-tauri/target/*/release/bundle/**/*.deb - gui/src-tauri/target/release/bundle/**/*.deb - gui/src-tauri/target/*/release/bundle/**/*.rpm - gui/src-tauri/target/release/bundle/**/*.rpm + target/*/release/bundle/**/*.AppImage + target/release/bundle/**/*.AppImage + target/*/release/bundle/**/*.deb + target/release/bundle/**/*.deb + target/*/release/bundle/**/*.rpm + target/release/bundle/**/*.rpm + target/*/release/bundle/**/*.sig + target/release/bundle/**/*.sig if-no-files-found: error diff --git a/.github/workflows/release-gui-macos.yml 
b/.github/workflows/release-gui-macos.yml index e91c304e..bb948a43 100644 --- a/.github/workflows/release-gui-macos.yml +++ b/.github/workflows/release-gui-macos.yml @@ -1,15 +1,10 @@ -name: Release GUI macOS +name: Debug GUI macOS Artifact on: - workflow_call: - inputs: - version: - required: true - type: string workflow_dispatch: inputs: version: - description: 'Version to release (without v prefix, e.g. 2026.10213.0)' + description: 'Version to rebuild for debugging (without v prefix, e.g. 2026.10213.0)' required: true type: string @@ -17,8 +12,9 @@ permissions: contents: read jobs: - build-gui-macos: + build-gui-macos-debug: runs-on: macos-14 + timeout-minutes: 60 steps: - uses: actions/checkout@v4 - uses: ./.github/actions/setup-node-pnpm @@ -38,13 +34,17 @@ jobs: shell: bash run: | echo "=== Finding all bundle artifacts ===" - find gui/src-tauri/target -path '*/bundle/*' -type f -name '*.dmg' 2>/dev/null || echo 'No bundle files found' + find target -path '*/bundle/*' -type f \( -name '*.dmg' -o -name '*.tar.gz' -o -name '*.sig' \) 2>/dev/null || echo 'No bundle files found' - name: Upload artifacts uses: actions/upload-artifact@v4 with: - name: gui-macos-14 + name: debug-gui-macos-14 path: | - gui/src-tauri/target/*/release/bundle/**/*.dmg - gui/src-tauri/target/release/bundle/**/*.dmg + target/*/release/bundle/**/*.dmg + target/*/release/bundle/**/*.tar.gz + target/*/release/bundle/**/*.sig + target/release/bundle/**/*.dmg + target/release/bundle/**/*.tar.gz + target/release/bundle/**/*.sig if-no-files-found: error diff --git a/.github/workflows/release-gui-win.yml b/.github/workflows/release-gui-win.yml index c854168f..623fe8a7 100644 --- a/.github/workflows/release-gui-win.yml +++ b/.github/workflows/release-gui-win.yml @@ -1,15 +1,10 @@ -name: Release GUI Windows +name: Debug GUI Windows Artifact on: - workflow_call: - inputs: - version: - required: true - type: string workflow_dispatch: inputs: version: - description: 'Version to release (without v 
prefix, e.g. 2026.10213.0)' + description: 'Version to rebuild for debugging (without v prefix, e.g. 2026.10213.0)' required: true type: string @@ -17,8 +12,9 @@ permissions: contents: read jobs: - build-gui-win: + build-gui-win-debug: runs-on: windows-latest + timeout-minutes: 45 steps: - uses: actions/checkout@v4 - uses: ./.github/actions/setup-node-pnpm @@ -37,13 +33,15 @@ jobs: shell: bash run: | echo "=== Finding all bundle artifacts ===" - find gui/src-tauri/target -path '*/bundle/*' -type f -name '*.exe' 2>/dev/null || echo 'No bundle files found' + find target -path '*/bundle/*' -type f \( -name '*.exe' -o -name '*.sig' \) 2>/dev/null || echo 'No bundle files found' - name: Upload artifacts uses: actions/upload-artifact@v4 with: - name: gui-windows-latest + name: debug-gui-windows-latest path: | - gui/src-tauri/target/*/release/bundle/**/*.exe - gui/src-tauri/target/release/bundle/**/*.exe + target/*/release/bundle/**/*.exe + target/*/release/bundle/**/*.sig + target/release/bundle/**/*.exe + target/release/bundle/**/*.sig if-no-files-found: error diff --git a/Cargo.lock b/Cargo.lock index a45f4f81..fdc1fd4e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2071,7 +2071,7 @@ dependencies = [ [[package]] name = "memory-sync-gui" -version = "2026.10303.11117" +version = "2026.10314.10606" dependencies = [ "dirs", "proptest", @@ -4439,7 +4439,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tnmsc" -version = "2026.10222.0" +version = "2026.10314.10606" dependencies = [ "clap", "dirs", @@ -4459,8 +4459,9 @@ dependencies = [ [[package]] name = "tnmsc-logger" -version = "2026.10222.0" +version = "2026.10314.10606" dependencies = [ + "chrono", "napi", "napi-build", "napi-derive", @@ -4470,7 +4471,7 @@ dependencies = [ [[package]] name = "tnmsc-md-compiler" -version = "2026.10222.0" +version = "2026.10314.10606" dependencies = [ "markdown", "napi", @@ -4483,6 +4484,19 @@ dependencies = [ "tnmsc-logger", ] 
+[[package]] +name = "tnmsc-script-runtime" +version = "2026.10314.10606" +dependencies = [ + "napi", + "napi-build", + "napi-derive", + "serde", + "serde_json", + "tempfile", + "wait-timeout", +] + [[package]] name = "tokio" version = "1.49.0" diff --git a/Cargo.toml b/Cargo.toml index d91b17fd..749b88e5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,11 +4,12 @@ members = [ "cli", "libraries/logger", "libraries/md-compiler", + "libraries/script-runtime", "gui/src-tauri", ] [workspace.package] -version = "2026.10222.0" +version = "2026.10314.10606" edition = "2024" license = "AGPL-3.0-only" authors = ["TrueNine"] @@ -19,6 +20,7 @@ repository = "https://github.com/TrueNine/memory-sync" tnmsc = { path = "cli" } tnmsc-logger = { path = "libraries/logger" } tnmsc-md-compiler = { path = "libraries/md-compiler" } +tnmsc-script-runtime = { path = "libraries/script-runtime" } # Serialization serde = { version = "1", features = ["derive"] } diff --git a/SECURITY.md b/SECURITY.md index 86fff327..bbd8fa2d 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -39,7 +39,7 @@ Don't rush. `memory-sync` is a CLI tool that **reads source files only and writes target configs only**. Its security boundary: -- **Reads**: user `.cn.mdx` source files, project config files (`.tnmsc.json`) +- **Reads**: user `.src.mdx` source files, project config files (`.tnmsc.json`) - **Writes**: target tool config directories (`.cursor/`, `.claude/`, `.kiro/`, etc.) 
- **Cleans**: removes stale files from target directories during sync diff --git a/cli/eslint.config.ts b/cli/eslint.config.ts index 8418913e..6ee69077 100644 --- a/cli/eslint.config.ts +++ b/cli/eslint.config.ts @@ -17,6 +17,8 @@ const config = eslint10({ ignores: [ '.turbo/**', 'aindex/**', + 'npm/**/noop.cjs', + 'npm/**/noop.d.ts', '*.md', '**/*.md', '*.toml', diff --git a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json index 3d4c3677..42e7d23c 100644 --- a/cli/npm/darwin-arm64/package.json +++ b/cli/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-arm64", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "os": [ "darwin" ], diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json index 672baad9..4b74ea45 100644 --- a/cli/npm/darwin-x64/package.json +++ b/cli/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-x64", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "os": [ "darwin" ], diff --git a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json index 9b3340b0..04f1735e 100644 --- a/cli/npm/linux-arm64-gnu/package.json +++ b/cli/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-arm64-gnu", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "os": [ "linux" ], diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json index 417b2b03..62c5d92c 100644 --- a/cli/npm/linux-x64-gnu/package.json +++ b/cli/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-x64-gnu", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "os": [ "linux" ], diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json index ad8f3101..d5c0c41e 100644 --- a/cli/npm/win32-x64-msvc/package.json +++ b/cli/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": 
"@truenine/memory-sync-cli-win32-x64-msvc", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "os": [ "win32" ], diff --git a/cli/package.json b/cli/package.json index 44a2314a..d4882124 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/memory-sync-cli", "type": "module", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "description": "TrueNine Memory Synchronization CLI", "author": "TrueNine", "license": "AGPL-3.0-only", @@ -51,8 +51,8 @@ "scripts": { "build": "run-s build:deps build:napi bundle generate:schema check", "build:napi": "tsx ../scripts/copy-napi.ts", - "build:deps": "pnpm -F @truenine/logger -F @truenine/md-compiler run build", - "bundle": "pnpm exec tsdown", + "build:deps": "pnpm -F @truenine/logger -F @truenine/md-compiler -F @truenine/script-runtime run build", + "bundle": "tsx ../scripts/build-quiet.ts", "check": "run-p typecheck lint", "generate:schema": "tsx scripts/generate-schema.ts", "lint": "eslint --cache .", @@ -64,6 +64,7 @@ }, "dependencies": { "@clack/prompts": "catalog:", + "@truenine/script-runtime": "workspace:*", "fast-glob": "catalog:", "fs-extra": "catalog:", "jiti": "2.6.1", diff --git a/cli/src/Aindex.ts b/cli/src/Aindex.ts index 8924cbbf..b67a16fd 100644 --- a/cli/src/Aindex.ts +++ b/cli/src/Aindex.ts @@ -2,7 +2,7 @@ * Aindex validation and generation utilities * 使用扁平的 bundles 结构直接遍历创建项目目录和文件 */ -import type {ILogger} from './plugins/plugin-shared' +import type {AindexConfig, ILogger} from './plugins/plugin-core' import * as fs from 'node:fs' import * as path from 'node:path' @@ -53,6 +53,14 @@ export interface GenerationResult { export interface GenerationOptions { /** Logger instance */ readonly logger?: ILogger + /** Aindex structure from user config */ + readonly config?: Required +} + +const DEFAULT_FILE_CONTENT = '# Generated by tnmsc init\n' + +function isFilePath(relativePath: string): boolean { + return path.extname(relativePath).length > 0 } /** 
@@ -62,21 +70,67 @@ export function generateAindex( rootPath: string, options: GenerationOptions = {} ): GenerationResult { - const {logger} = options + const {logger, config} = options const createdDirs: string[] = [] const createdFiles: string[] = [] const existedDirs: string[] = [] const existedFiles: string[] = [] const createdDirsSet = new Set() + const existedDirsSet = new Set() + const existedFilesSet = new Set() + + const ensureDirectory = (dirPath: string): void => { + if (fs.existsSync(dirPath)) { + if (!existedDirsSet.has(dirPath)) { + existedDirsSet.add(dirPath) + existedDirs.push(dirPath) + logger?.debug('directory exists', {path: dirPath}) + } + return + } + + fs.mkdirSync(dirPath, {recursive: true}) + + let currentDir = dirPath + while (!createdDirsSet.has(currentDir)) { + createdDirsSet.add(currentDir) + createdDirs.push(currentDir) + logger?.info('created directory', {path: currentDir}) + + if (currentDir === rootPath) break + currentDir = path.dirname(currentDir) + } + } + + const ensureFile = (filePath: string, content: string = DEFAULT_FILE_CONTENT): void => { + ensureDirectory(path.dirname(filePath)) + + if (fs.existsSync(filePath)) { + if (!existedFilesSet.has(filePath)) { + existedFilesSet.add(filePath) + existedFiles.push(filePath) + logger?.debug('file exists', {path: filePath}) + } + return + } + + fs.writeFileSync(filePath, content, 'utf8') + createdFiles.push(filePath) + logger?.info('created file', {path: filePath}) + } + + ensureDirectory(rootPath) + + if (config != null) { + for (const [key, moduleConfig] of Object.entries(config)) { + if (key === 'dir' || typeof moduleConfig !== 'object' || moduleConfig == null) continue - if (fs.existsSync(rootPath)) { - existedDirs.push(rootPath) - logger?.debug('directory exists', {path: rootPath}) - } else { - fs.mkdirSync(rootPath, {recursive: true}) - createdDirs.push(rootPath) - createdDirsSet.add(rootPath) - logger?.info('created directory', {path: rootPath}) + for (const relativePath of 
[moduleConfig.src, moduleConfig.dist]) { + const targetPath = path.join(rootPath, relativePath) + if (isFilePath(relativePath)) ensureFile(targetPath) + else ensureDirectory(targetPath) + } + } } return { diff --git a/cli/src/ConfigLoader.test.ts b/cli/src/ConfigLoader.test.ts new file mode 100644 index 00000000..dba44920 --- /dev/null +++ b/cli/src/ConfigLoader.test.ts @@ -0,0 +1,55 @@ +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {afterEach, describe, expect, it, vi} from 'vitest' +import {createLogger} from './plugins/plugin-core' + +const mockedGuardModule = vi.hoisted(() => ({ + protectedViolation: { + targetPath: '', + protectedPath: '', + protectionMode: 'direct' as const, + source: 'test', + reason: 'test' + }, + getProtectedPathViolationMock: vi.fn(), + logProtectedDeletionGuardErrorMock: vi.fn() +})) + +mockedGuardModule.getProtectedPathViolationMock.mockImplementation(() => mockedGuardModule.protectedViolation) + +vi.mock('./ProtectedDeletionGuard', async () => { + const actual = await vi.importActual('./ProtectedDeletionGuard') + return { + ...actual, + getProtectedPathViolation: mockedGuardModule.getProtectedPathViolationMock, + logProtectedDeletionGuardError: mockedGuardModule.logProtectedDeletionGuardErrorMock + } +}) + +describe('ensureConfigLink', () => { + afterEach(() => { + vi.clearAllMocks() + mockedGuardModule.getProtectedPathViolationMock.mockImplementation(() => mockedGuardModule.protectedViolation) + }) + + it('blocks deleting a protected config path during link replacement', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-config-link-guard-')) + const localConfigPath = path.join(tempDir, '.tnmsc.json') + const globalConfigPath = path.join(tempDir, 'global-target.json') + const {ensureConfigLink} = await import('./ConfigLoader') + + try { + fs.writeFileSync(localConfigPath, '{"logLevel":"info"}', 'utf8') + fs.writeFileSync(globalConfigPath, 
'{"logLevel":"warn"}', 'utf8') + + expect(() => ensureConfigLink(localConfigPath, globalConfigPath, createLogger('ensureConfigLinkTest', 'silent'))) + .toThrow('Protected deletion guard blocked config-link-replacement') + expect(fs.readFileSync(localConfigPath, 'utf8')).toBe('{"logLevel":"info"}') + expect(mockedGuardModule.logProtectedDeletionGuardErrorMock).toHaveBeenCalledOnce() + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/ConfigLoader.ts b/cli/src/ConfigLoader.ts index 299277aa..eab48053 100644 --- a/cli/src/ConfigLoader.ts +++ b/cli/src/ConfigLoader.ts @@ -1,9 +1,24 @@ -import type {AindexConfig, ConfigLoaderOptions, ConfigLoadResult, ILogger, UserConfigFile} from './plugins/plugin-shared' +import type { + AindexConfig, + CleanupProtectionOptions, + ConfigLoaderOptions, + ConfigLoadResult, + ILogger, + OutputScopeOptions, + PluginOutputScopeTopics, + UserConfigFile +} from './plugins/plugin-core' import * as fs from 'node:fs' import * as os from 'node:os' import * as path from 'node:path' import process from 'node:process' -import {convertUserConfigAindexToShadowSourceProject, createLogger, DEFAULT_USER_CONFIG, ZUserConfigFile} from './plugins/plugin-shared' +import {createLogger, ZUserConfigFile} from './plugins/plugin-core' +import { + createProtectedDeletionGuard, + getProtectedPathViolation, + logProtectedDeletionGuardError, + ProtectedDeletionGuardError +} from './ProtectedDeletionGuard' /** * Default config file name @@ -22,15 +37,6 @@ export function getGlobalConfigPath(): string { return path.join(os.homedir(), DEFAULT_GLOBAL_CONFIG_DIR, DEFAULT_CONFIG_FILE_NAME) } -/** - * Get default user config content - * Uses build-time injected template from public/tnmsc.example.json - * @deprecated Config is now required - no default config is provided - */ -export function getDefaultUserConfig(): UserConfigFile { - return {...DEFAULT_USER_CONFIG} -} - /** * Validation result for global config */ @@ -94,8 
+100,8 @@ export class ConfigLoader { return {config, source: resolvedPath, found: true} } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error) // Parse/validation failure - throw error instead of silently returning empty config - throw new Error(`Failed to load config from ${resolvedPath}: ${errorMessage}`) + this.logger.warn('load failed', {path: resolvedPath, error}) + return {config: {}, source: null, found: false} } } @@ -108,17 +114,13 @@ export class ConfigLoader { if (result.found) loadedConfigs.push(result) } - if (loadedConfigs.length === 0) { // No config found - throw error instead of returning empty config - throw new Error(`No valid config file found. Searched: ${searchPaths.join(', ')}`) - } - const merged = this.mergeConfigs(loadedConfigs.map(r => r.config)) // Merge configs (first has highest priority) const sources = loadedConfigs.map(r => r.source).filter((s): s is string => s !== null) return { config: merged, sources, - found: true + found: loadedConfigs.length > 0 } } @@ -133,9 +135,7 @@ export class ConfigLoader { } const result = ZUserConfigFile.safeParse(parsed) - if (result.success) { - return convertUserConfigAindexToShadowSourceProject(result.data) // Convert aindex format to shadowSourceProject format if needed - } + if (result.success) return result.data const errors = result.error.issues.map((i: {path: (string | number)[], message: string}) => `${i.path.join('.')}: ${i.message}`) // Validation failed - throw error instead of returning empty config throw new Error(`Config validation failed in ${filePath}:\n${errors.join('\n')}`) @@ -151,11 +151,18 @@ export class ConfigLoader { return reversed.reduce((acc, config) => { const mergedAindex = this.mergeAindex(acc.aindex, config.aindex) + const mergedOutputScopes = this.mergeOutputScopeOptions(acc.outputScopes, config.outputScopes) + const mergedCleanupProtection = this.mergeCleanupProtectionOptions( + acc.cleanupProtection, + 
config.cleanupProtection + ) return { ...acc, ...config, - ...mergedAindex != null ? {aindex: mergedAindex} : {} + ...mergedAindex != null ? {aindex: mergedAindex} : {}, + ...mergedOutputScopes != null ? {outputScopes: mergedOutputScopes} : {}, + ...mergedCleanupProtection != null ? {cleanupProtection: mergedCleanupProtection} : {} } }, {}) } @@ -181,6 +188,53 @@ export class ConfigLoader { } } + private mergeOutputScopeTopics( + a?: PluginOutputScopeTopics, + b?: PluginOutputScopeTopics + ): PluginOutputScopeTopics | undefined { + if (a == null && b == null) return void 0 + if (a == null) return b + if (b == null) return a + return {...a, ...b} + } + + private mergeOutputScopeOptions( + a?: OutputScopeOptions, + b?: OutputScopeOptions + ): OutputScopeOptions | undefined { + if (a == null && b == null) return void 0 + if (a == null) return b + if (b == null) return a + + const mergedPlugins: Record = {} + for (const [pluginName, topics] of Object.entries(a.plugins ?? {})) { + if (topics != null) mergedPlugins[pluginName] = {...topics} + } + for (const [pluginName, topics] of Object.entries(b.plugins ?? {})) { + const mergedTopics = this.mergeOutputScopeTopics(mergedPlugins[pluginName], topics) + if (mergedTopics != null) mergedPlugins[pluginName] = mergedTopics + } + + if (Object.keys(mergedPlugins).length === 0) return {} + return {plugins: mergedPlugins} + } + + private mergeCleanupProtectionOptions( + a?: CleanupProtectionOptions, + b?: CleanupProtectionOptions + ): CleanupProtectionOptions | undefined { + if (a == null && b == null) return void 0 + if (a == null) return b + if (b == null) return a + + return { + rules: [ + ...a.rules ?? [], + ...b.rules ?? 
[] + ] + } + } + private resolveTilde(p: string): string { if (p.startsWith('~')) return path.join(os.homedir(), p.slice(1)) return p @@ -218,6 +272,82 @@ export function loadUserConfig(cwd?: string): MergedConfigResult { return getConfigLoader().load(cwd) } +function isSymlinkPath(filePath: string): boolean { + try { + return fs.lstatSync(filePath).isSymbolicLink() + } + catch { + return false + } +} + +function readSymlinkTarget(filePath: string): string | null { + try { + return fs.readlinkSync(filePath) + } + catch { + return null + } +} + +function assertConfigDeletionAllowed( + targetPath: string, + logger: ILogger +): void { + const violation = getProtectedPathViolation(targetPath, createProtectedDeletionGuard()) + if (violation == null) return + + logProtectedDeletionGuardError(logger, 'config-link-replacement', [violation]) + throw new ProtectedDeletionGuardError('config-link-replacement', [violation]) +} + +/** + * Ensure a local config file is linked (symlink preferred) to the global config. + * Falls back to a file copy when symlink creation is unavailable. 
+ */ +export function ensureConfigLink( + localConfigPath: string, + globalConfigPath: string, + logger: ILogger +): void { + if (!fs.existsSync(globalConfigPath)) return + + if (fs.existsSync(localConfigPath) || isSymlinkPath(localConfigPath)) { + if (isSymlinkPath(localConfigPath)) { + const target = readSymlinkTarget(localConfigPath) + if (target !== null && path.resolve(path.dirname(localConfigPath), target) === path.resolve(globalConfigPath)) return + assertConfigDeletionAllowed(localConfigPath, logger) + fs.rmSync(localConfigPath, {force: true}) + } else { + const localContent = fs.readFileSync(localConfigPath, 'utf8') + const globalContent = fs.readFileSync(globalConfigPath, 'utf8') + if (localContent !== globalContent) { + fs.copyFileSync(localConfigPath, globalConfigPath) + logger.debug('synced local config back to global', {src: localConfigPath, dest: globalConfigPath}) + } + assertConfigDeletionAllowed(localConfigPath, logger) + fs.rmSync(localConfigPath, {force: true}) + } + } + + try { + fs.symlinkSync(globalConfigPath, localConfigPath, 'file') + logger.debug('linked config', {link: localConfigPath, target: globalConfigPath}) + } + catch { + try { + fs.copyFileSync(globalConfigPath, localConfigPath) + logger.warn('symlink unavailable, copied config (auto-sync disabled)', {dest: localConfigPath}) + } + catch (copyErr) { + logger.warn('failed to link or copy config', { + path: localConfigPath, + error: copyErr instanceof Error ? copyErr.message : String(copyErr) + }) + } + } +} + /** * Validate global config file strictly. * - If config doesn't exist: return invalid result (do not auto-create) @@ -299,11 +429,3 @@ export function validateGlobalConfig(): GlobalConfigValidationResult { shouldExit: false } } - -/** - * @deprecated Use validateGlobalConfig() instead. This function is kept for backward compatibility - * but no longer auto-creates default config. 
- */ -export function validateAndEnsureGlobalConfig(): GlobalConfigValidationResult { - return validateGlobalConfig() -} diff --git a/cli/src/PluginPipeline.ts b/cli/src/PluginPipeline.ts index b8d030bf..c3ba9a4d 100644 --- a/cli/src/PluginPipeline.ts +++ b/cli/src/PluginPipeline.ts @@ -1,40 +1,16 @@ import type {MdxGlobalScope} from '@truenine/md-compiler/globals' -import type {CollectedInputContext, ILogger, InputPlugin, InputPluginContext, OutputCleanContext, OutputPlugin, OutputWriteContext, PluginOptions, UserConfigFile} from './plugins/plugin-shared' -import type {Command, CommandContext} from '@/commands' +import type {ILogger, InputCollectedContext, InputPlugin, InputPluginContext, OutputCleanContext, OutputCollectedContext, OutputPlugin, OutputWriteContext, PluginOptions, UserConfigFile} from './plugins/plugin-core' +import type {Command, CommandContext, CommandResult} from '@/commands/Command' import type {PipelineConfig} from '@/config' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import * as fs from 'node:fs' import * as path from 'node:path' -import {GlobalScopeCollector, ScopePriority, ScopeRegistry} from '@truenine/plugin-input-shared' import glob from 'fast-glob' -import { - buildDependencyContext, - extractUserArgs, - mergeContexts, - parseArgs, - - resolveCommand, - resolveLogLevel, - topologicalSort -} from '@/pipeline' -import {startupVersionCheck} from '@/versionCheck' -import {createLogger, setGlobalLogLevel} from './plugins/plugin-shared' - -export type { - LogLevel, - ParsedCliArgs, - Subcommand -} from '@/pipeline' // Re-export types for backwards compatibility - -export { // Re-export functions for backwards compatibility - buildDependencyGraph, - extractUserArgs, - parseArgs, - resolveCommand, - resolveLogLevel, - topologicalSort, - validateDependencies -} from '@/pipeline' +import {JsonOutputCommand} from '@/commands/JsonOutputCommand' +import {extractUserArgs, parseArgs, 
resolveCommand} from '@/pipeline/CliArgumentParser' +import {buildDependencyContext, mergeContexts} from '@/pipeline/ContextMerger' +import {topologicalSort} from '@/pipeline/PluginDependencyResolver' +import {createLogger, GlobalScopeCollector, ScopePriority, ScopeRegistry, setGlobalLogLevel} from './plugins/plugin-core' /** * Plugin Pipeline - Orchestrates plugin execution @@ -54,7 +30,7 @@ export class PluginPipeline { const userArgs = extractUserArgs(filtered) this.args = parseArgs(userArgs) - const resolvedLogLevel = resolveLogLevel(this.args) // Resolve log level from parsed args and set globally + const resolvedLogLevel = this.args.logLevel // Resolve log level from parsed args and set globally if (resolvedLogLevel != null) setGlobalLogLevel(resolvedLogLevel) this.logger = createLogger('PluginPipeline', resolvedLogLevel) this.logger.debug('initialized', {args: this.args}) @@ -65,58 +41,52 @@ export class PluginPipeline { return this } - async run(config: PipelineConfig): Promise { - void startupVersionCheck(this.logger) // Don't await - let it run in background without blocking process exit // Startup version check (runs on even minutes, non-blocking) - + async run(config: PipelineConfig): Promise { const {context, outputPlugins, userConfigOptions} = config this.registerOutputPlugins([...outputPlugins]) - let command: Command = this.resolveCommand() + let command: Command = resolveCommand(this.args) if (this.args.jsonFlag) { setGlobalLogLevel('silent') // Suppress all console logging in JSON mode const selfJsonCommands = new Set(['config-show', 'plugins']) // only need log suppression, not JsonOutputCommand wrapping // Commands that handle their own JSON output (config --show, plugins) - if (!selfJsonCommands.has(command.name)) command = new (await import('@/commands')).JsonOutputCommand(command) + if (!selfJsonCommands.has(command.name)) command = new JsonOutputCommand(command) } const commandCtx = this.createCommandContext(context, userConfigOptions) - 
await command.execute(commandCtx) - } - - private resolveCommand(): Command { - return resolveCommand(this.args) + return command.execute(commandCtx) } - private createCommandContext(ctx: CollectedInputContext, userConfigOptions: Required): CommandContext { + private createCommandContext(ctx: OutputCollectedContext, userConfigOptions: Required): CommandContext { return { logger: this.logger, outputPlugins: this.outputPlugins, - collectedInputContext: ctx, + collectedOutputContext: ctx, userConfigOptions, createCleanContext: (dryRun: boolean) => this.createCleanContext(ctx, dryRun), createWriteContext: (dryRun: boolean) => this.createWriteContext(ctx, dryRun) } } - private createCleanContext(ctx: CollectedInputContext, dryRun: boolean): OutputCleanContext { + private createCleanContext(ctx: OutputCollectedContext, dryRun: boolean): OutputCleanContext { return { logger: this.logger, fs, path, glob, - collectedInputContext: ctx, + collectedOutputContext: ctx, dryRun } } - private createWriteContext(ctx: CollectedInputContext, dryRun: boolean): OutputWriteContext { + private createWriteContext(ctx: OutputCollectedContext, dryRun: boolean): OutputWriteContext { return { logger: this.logger, fs, path, glob, - collectedInputContext: ctx, + collectedOutputContext: ctx, dryRun, registeredPluginNames: this.outputPlugins.map(p => p.name) } @@ -127,7 +97,7 @@ export class PluginPipeline { baseCtx: Omit, dryRun: boolean = false, userConfig?: UserConfigFile - ): Promise> { + ): Promise> { if (plugins.length === 0) return {} const sortedPlugins = topologicalSort(plugins) as InputPlugin[] // Sort plugins by dependencies @@ -143,16 +113,18 @@ export class PluginPipeline { hasTool: Object.keys(globalScope.tool).length > 0 }) - const outputsByPlugin = new Map>() // Track outputs by plugin name for dependency resolution + const outputsByPlugin = new Map>() // Track outputs by plugin name for dependency resolution - let accumulatedContext: Partial = {} // Accumulated context from all 
executed plugins + let accumulatedContext: Partial = {} // Accumulated context from all executed plugins for (const plugin of sortedPlugins) { - const dependencyContext = this.buildDependencyContext(plugin, outputsByPlugin) // Build dependency context from direct dependencies only + const dependencyContext = buildDependencyContext(plugin, outputsByPlugin, mergeContexts) // Build dependency context from direct dependencies only + const runtimeCommand = this.resolveRuntimeCommand() const ctx: InputPluginContext = { // Create context with dependency outputs, globalScope, and scopeRegistry ...baseCtx, dependencyContext, + ...runtimeCommand != null && {runtimeCommand}, globalScope, scopeRegistry } @@ -179,10 +151,12 @@ export class PluginPipeline { return accumulatedContext } - private buildDependencyContext( - plugin: InputPlugin, - outputsByPlugin: Map> - ): Partial { - return buildDependencyContext(plugin, outputsByPlugin, mergeContexts) + private resolveRuntimeCommand(): InputPluginContext['runtimeCommand'] { + if (this.args.helpFlag || this.args.versionFlag || this.args.unknownCommand != null) return void 0 + if (this.args.subcommand === 'clean') return 'clean' + if (this.args.subcommand === 'plugins') return 'plugins' + if (this.args.subcommand === 'dry-run' || this.args.dryRun) return 'dry-run' + if (this.args.subcommand == null) return 'execute' + return void 0 } } diff --git a/cli/src/ProtectedDeletionGuard.ts b/cli/src/ProtectedDeletionGuard.ts new file mode 100644 index 00000000..69a3e68d --- /dev/null +++ b/cli/src/ProtectedDeletionGuard.ts @@ -0,0 +1,617 @@ +import type {OutputCollectedContext, PluginOptions} from './plugins/plugin-core' +import type {PublicDefinitionResolveOptions} from './public-config-paths' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import process from 'node:process' +import glob from 'fast-glob' +import {collectKnownPublicConfigDefinitionPaths} from './public-config-paths' + 
+interface DirPathLike { + readonly path: string + readonly pathKind?: string + readonly basePath?: string + readonly getAbsolutePath?: () => string +} + +export type ProtectionMode = 'direct' | 'recursive' +export type ProtectionRuleMatcher = 'path' | 'glob' + +export interface ProtectedPathRule { + readonly path: string + readonly protectionMode: ProtectionMode + readonly reason: string + readonly source: string + readonly matcher?: ProtectionRuleMatcher +} + +interface CompiledProtectedPathRule extends ProtectedPathRule { + readonly comparisonKeys: readonly string[] + readonly normalizedPath: string + readonly specificity: number +} + +export interface ProtectedPathViolation { + readonly targetPath: string + readonly protectedPath: string + readonly protectionMode: ProtectionMode + readonly reason: string + readonly source: string +} + +export interface ProtectedDeletionGuard { + readonly rules: readonly ProtectedPathRule[] + readonly exactProtectedPaths: readonly string[] + readonly subtreeProtectedPaths: readonly string[] + readonly compiledRules: readonly CompiledProtectedPathRule[] +} + +export interface ProtectedDeletionGuardOptions { + readonly workspaceDir?: string + readonly aindexDir?: string + readonly projectRoots?: readonly string[] + readonly exactProtectedPaths?: readonly string[] + readonly subtreeProtectedPaths?: readonly string[] + readonly rules?: readonly ProtectedPathRule[] + readonly includeReservedWorkspaceContentRoots?: boolean +} + +export class ProtectedDeletionGuardError extends Error { + readonly operation: string + + readonly violations: readonly ProtectedPathViolation[] + + constructor(operation: string, violations: readonly ProtectedPathViolation[]) { + super(buildProtectedDeletionGuardMessage(operation, violations)) + this.name = 'ProtectedDeletionGuardError' + this.operation = operation + this.violations = violations + } +} + +const CONFIGURED_AINDEX_DIRECTORY_KEYS = [ + 'skills', + 'commands', + 'subAgents', + 'rules', + 'app', + 
'ext', + 'arch' +] as const satisfies readonly (keyof Required['aindex'])[] + +const CONFIGURED_AINDEX_FILE_KEYS = [ + 'globalPrompt', + 'workspacePrompt' +] as const satisfies readonly (keyof Required['aindex'])[] + +function resolveXdgConfigHome(homeDir: string): string { + const xdgConfigHome = process.env['XDG_CONFIG_HOME'] + if (typeof xdgConfigHome === 'string' && xdgConfigHome.trim().length > 0) return xdgConfigHome + return path.join(homeDir, '.config') +} + +function resolveXdgDataHome(homeDir: string): string { + const xdgDataHome = process.env['XDG_DATA_HOME'] + if (typeof xdgDataHome === 'string' && xdgDataHome.trim().length > 0) return xdgDataHome + return path.join(homeDir, '.local', 'share') +} + +function resolveXdgStateHome(homeDir: string): string { + const xdgStateHome = process.env['XDG_STATE_HOME'] + if (typeof xdgStateHome === 'string' && xdgStateHome.trim().length > 0) return xdgStateHome + return path.join(homeDir, '.local', 'state') +} + +function resolveXdgCacheHome(homeDir: string): string { + const xdgCacheHome = process.env['XDG_CACHE_HOME'] + if (typeof xdgCacheHome === 'string' && xdgCacheHome.trim().length > 0) return xdgCacheHome + return path.join(homeDir, '.cache') +} + +function resolveAbsolutePathFromDir(dir: DirPathLike | undefined): string | undefined { + if (dir == null) return void 0 + + if (typeof dir.getAbsolutePath === 'function') { + try { + const absolute = dir.getAbsolutePath() + if (absolute.length > 0) return path.resolve(absolute) + } + catch {} + } + + if (dir.pathKind === 'absolute') return path.resolve(dir.path) + if (typeof dir.basePath === 'string' && dir.basePath.length > 0) return path.resolve(dir.basePath, dir.path) + return void 0 +} + +export function expandHomePath(rawPath: string): string { + if (rawPath === '~') return os.homedir() + if (rawPath.startsWith('~/') || rawPath.startsWith('~\\')) return path.resolve(os.homedir(), rawPath.slice(2)) + return rawPath +} + +export function 
resolveAbsolutePath(rawPath: string): string { + return path.resolve(expandHomePath(rawPath)) +} + +function normalizeForComparison(rawPath: string): string { + const normalized = path.normalize(resolveAbsolutePath(rawPath)) + if (process.platform === 'win32') return normalized.toLowerCase() + return normalized +} + +function stripTrailingSeparator(rawPath: string): string { + const {root} = path.parse(rawPath) + if (rawPath === root) return rawPath + return rawPath.endsWith(path.sep) ? rawPath.slice(0, -1) : rawPath +} + +function isSameOrChildPath(candidate: string, parent: string): boolean { + const normalizedCandidate = stripTrailingSeparator(candidate) + const normalizedParent = stripTrailingSeparator(parent) + if (normalizedCandidate === normalizedParent) return true + return normalizedCandidate.startsWith(`${normalizedParent}${path.sep}`) +} + +export function buildComparisonKeys(rawPath: string): readonly string[] { + const absolute = resolveAbsolutePath(rawPath) + const keys = new Set([normalizeForComparison(absolute)]) + + try { + if (fs.existsSync(absolute)) { + const realPath = fs.realpathSync.native(absolute) + keys.add(normalizeForComparison(realPath)) + } + } + catch {} + + return [...keys] +} + +function createProtectedPathRule( + rawPath: string, + protectionMode: ProtectionMode, + reason: string, + source: string, + matcher: ProtectionRuleMatcher = 'path' +): ProtectedPathRule { + return { + path: resolveAbsolutePath(rawPath), + protectionMode, + reason, + source, + matcher + } +} + +function compileRule(rule: ProtectedPathRule): CompiledProtectedPathRule { + const normalizedPath = normalizeForComparison(rule.path) + return { + ...rule, + path: resolveAbsolutePath(rule.path), + comparisonKeys: buildComparisonKeys(rule.path), + normalizedPath, + specificity: stripTrailingSeparator(normalizedPath).length + } +} + +function dedupeAndCompileRules(rules: readonly ProtectedPathRule[]): CompiledProtectedPathRule[] { + const compiledByKey = new Map() + + 
for (const rule of rules) { + const compiled = compileRule(rule) + compiledByKey.set(`${compiled.protectionMode}:${compiled.normalizedPath}`, compiled) + } + + return [...compiledByKey.values()].sort((a, b) => { + const specificityDiff = b.specificity - a.specificity + if (specificityDiff !== 0) return specificityDiff + + if (a.protectionMode !== b.protectionMode) return a.protectionMode === 'recursive' ? -1 : 1 + return a.path.localeCompare(b.path) + }) +} + +function normalizeGlobPattern(pattern: string): string { + return resolveAbsolutePath(pattern).replaceAll('\\', '/') +} + +function expandProtectedPathRules(rules: readonly ProtectedPathRule[]): ProtectedPathRule[] { + const expandedRules: ProtectedPathRule[] = [] + + for (const rule of rules) { + if (rule.matcher !== 'glob') { + expandedRules.push(createProtectedPathRule(rule.path, rule.protectionMode, rule.reason, rule.source)) + continue + } + + const matchedPaths = glob.sync(normalizeGlobPattern(rule.path), { + onlyFiles: false, + dot: true, + absolute: true, + followSymbolicLinks: false + }) + + for (const matchedPath of matchedPaths) expandedRules.push(createProtectedPathRule(matchedPath, rule.protectionMode, rule.reason, rule.source)) + } + + return expandedRules +} + +function isRuleMatch(targetKey: string, ruleKey: string, protectionMode: ProtectionMode): boolean { + if (protectionMode === 'direct') return isSameOrChildPath(ruleKey, targetKey) + return isSameOrChildPath(targetKey, ruleKey) || isSameOrChildPath(ruleKey, targetKey) +} + +function detectPathProtectionMode(rawPath: string, fallback: ProtectionMode): ProtectionMode { + const absolutePath = resolveAbsolutePath(rawPath) + + try { + if (fs.existsSync(absolutePath) && fs.lstatSync(absolutePath).isDirectory()) return 'recursive' + } + catch {} + + return fallback +} + +function collectBuiltInDangerousPathRules(): ProtectedPathRule[] { + const homeDir = os.homedir() + + return [ + createProtectedPathRule(path.parse(homeDir).root, 'direct', 
'built-in dangerous root path', 'built-in-dangerous-root'), + createProtectedPathRule(homeDir, 'direct', 'built-in dangerous home directory', 'built-in-dangerous-root'), + createProtectedPathRule(resolveXdgConfigHome(homeDir), 'direct', 'built-in dangerous config directory', 'built-in-dangerous-root'), + createProtectedPathRule(resolveXdgDataHome(homeDir), 'direct', 'built-in dangerous data directory', 'built-in-dangerous-root'), + createProtectedPathRule(resolveXdgStateHome(homeDir), 'direct', 'built-in dangerous state directory', 'built-in-dangerous-root'), + createProtectedPathRule(resolveXdgCacheHome(homeDir), 'direct', 'built-in dangerous cache directory', 'built-in-dangerous-root'), + createProtectedPathRule(path.join(homeDir, '.aindex'), 'direct', 'built-in global aindex directory', 'built-in-dangerous-root'), + createProtectedPathRule(path.join(homeDir, '.aindex', '.tnmsc.json'), 'direct', 'built-in global config file', 'built-in-config') + ] +} + +function collectWorkspaceReservedRules( + workspaceDir: string, + projectRoots: readonly string[], + includeReservedWorkspaceContentRoots: boolean +): ProtectedPathRule[] { + const rules: ProtectedPathRule[] = [ + createProtectedPathRule(workspaceDir, 'direct', 'workspace root', 'workspace-reserved'), + createProtectedPathRule(path.join(workspaceDir, 'aindex'), 'direct', 'reserved workspace aindex root', 'workspace-reserved'), + createProtectedPathRule(path.join(workspaceDir, 'knowladge'), 'direct', 'reserved workspace knowladge root', 'workspace-reserved') + ] + + for (const projectRoot of projectRoots) rules.push(createProtectedPathRule(projectRoot, 'direct', 'workspace project root', 'workspace-project-root')) + + if (includeReservedWorkspaceContentRoots) { + rules.push( + createProtectedPathRule( + path.join(workspaceDir, 'aindex', 'dist', '**', '*.mdx'), + 'direct', + 'reserved workspace aindex dist mdx files', + 'workspace-reserved', + 'glob' + ), + createProtectedPathRule( + path.join(workspaceDir, 
'aindex', 'app', '**', '*.mdx'), + 'direct', + 'reserved workspace aindex app mdx files', + 'workspace-reserved', + 'glob' + ) + ) + } + + return rules +} + +function collectResolvedAindexRules(aindexDir: string): ProtectedPathRule[] { + return [createProtectedPathRule(aindexDir, 'direct', 'resolved aindex root', 'aindex-root')] +} + +export function collectKnownAindexInputConfigPaths( + aindexDir: string, + resolveOptions?: PublicDefinitionResolveOptions +): string[] { + return collectKnownPublicConfigDefinitionPaths(aindexDir, resolveOptions) +} + +export function collectConfiguredAindexInputRules( + pluginOptions: Required, + aindexDir: string, + resolveOptions?: PublicDefinitionResolveOptions +): ProtectedPathRule[] { + const rules: ProtectedPathRule[] = [] + + for (const key of CONFIGURED_AINDEX_DIRECTORY_KEYS) { + rules.push( + createProtectedPathRule( + path.join(aindexDir, pluginOptions.aindex[key].src), + 'recursive', + `configured aindex ${key} source directory`, + 'configured-aindex-source' + ) + ) + } + + for (const key of CONFIGURED_AINDEX_FILE_KEYS) { + rules.push( + createProtectedPathRule( + path.join(aindexDir, pluginOptions.aindex[key].src), + 'direct', + `configured aindex ${key} source file`, + 'configured-aindex-source' + ) + ) + } + + for (const protectedPath of collectKnownAindexInputConfigPaths(aindexDir, resolveOptions)) { + rules.push( + createProtectedPathRule( + protectedPath, + 'direct', + 'known aindex input config file', + 'known-aindex-config' + ) + ) + } + + return rules +} + +export function collectConfiguredAindexInputPaths( + pluginOptions: Required, + aindexDir: string, + resolveOptions?: PublicDefinitionResolveOptions +): string[] { + return collectConfiguredAindexInputRules(pluginOptions, aindexDir, resolveOptions).map(rule => rule.path) +} + +export function collectProtectedInputSourceRules( + collectedOutputContext: OutputCollectedContext +): ProtectedPathRule[] { + const rules: ProtectedPathRule[] = [] + const seen = new 
Set() + + const addRule = ( + rawPath: string | undefined, + protectionMode: ProtectionMode, + reason: string, + source: string + ): void => { + if (rawPath == null || rawPath.length === 0) return + + const rule = createProtectedPathRule(rawPath, protectionMode, reason, source) + const dedupeKey = `${rule.protectionMode}:${normalizeForComparison(rule.path)}` + if (seen.has(dedupeKey)) return + + seen.add(dedupeKey) + rules.push(rule) + } + + const addRuleFromDir = ( + dir: DirPathLike | undefined, + protectionMode: ProtectionMode, + reason: string, + source: string + ): void => { + const resolved = resolveAbsolutePathFromDir(dir) + if (resolved == null) return + addRule(resolved, protectionMode, reason, source) + } + + addRuleFromDir(collectedOutputContext.globalMemory?.dir as DirPathLike | undefined, 'recursive', 'global memory source directory', 'collected-input-source') + + for (const command of collectedOutputContext.commands ?? []) { + addRuleFromDir(command.dir as DirPathLike | undefined, 'recursive', 'command source directory', 'collected-input-source') + } + + for (const subAgent of collectedOutputContext.subAgents ?? []) { + addRuleFromDir(subAgent.dir as DirPathLike | undefined, 'recursive', 'sub-agent source directory', 'collected-input-source') + } + + for (const rule of collectedOutputContext.rules ?? []) { + addRuleFromDir(rule.dir as DirPathLike | undefined, 'recursive', 'rule source directory', 'collected-input-source') + } + + for (const skill of collectedOutputContext.skills ?? []) { + addRuleFromDir(skill.dir as DirPathLike | undefined, 'recursive', 'skill source directory', 'collected-input-source') + for (const childDoc of skill.childDocs ?? []) { + addRuleFromDir(childDoc.dir as DirPathLike | undefined, 'recursive', 'skill child document directory', 'collected-input-source') + } + for (const resource of skill.resources ?? 
[]) { + if (resource.sourcePath == null || resource.sourcePath.length === 0) continue + addRule( + resource.sourcePath, + detectPathProtectionMode(resource.sourcePath, 'direct'), + 'skill resource source path', + 'collected-input-source' + ) + } + } + + for (const config of collectedOutputContext.vscodeConfigFiles ?? []) { + addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'vscode input config file', 'collected-input-config') + } + + for (const config of collectedOutputContext.jetbrainsConfigFiles ?? []) { + addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'jetbrains input config file', 'collected-input-config') + } + + for (const config of collectedOutputContext.editorConfigFiles ?? []) { + addRuleFromDir(config.dir as DirPathLike | undefined, 'direct', 'editorconfig input file', 'collected-input-config') + } + + for (const ignoreFile of collectedOutputContext.aiAgentIgnoreConfigFiles ?? []) { + addRule(ignoreFile.sourcePath, 'direct', 'AI agent ignore config file', 'collected-input-config') + } + + if (collectedOutputContext.aindexDir != null) { + for (const protectedPath of collectKnownAindexInputConfigPaths(collectedOutputContext.aindexDir, { + workspaceDir: collectedOutputContext.workspace.directory.path + })) { + addRule(protectedPath, 'direct', 'known aindex input config file', 'known-aindex-config') + } + } + + return rules +} + +export function collectProtectedInputSourcePaths(collectedOutputContext: OutputCollectedContext): string[] { + return collectProtectedInputSourceRules(collectedOutputContext).map(rule => rule.path) +} + +function collectLegacyCompatibilityRules(options: ProtectedDeletionGuardOptions): ProtectedPathRule[] { + const rules: ProtectedPathRule[] = [] + + for (const protectedPath of options.exactProtectedPaths ?? []) { + rules.push(createProtectedPathRule(protectedPath, 'direct', 'legacy direct protected path', 'legacy-direct')) + } + + for (const protectedPath of options.subtreeProtectedPaths ?? 
[]) { + rules.push(createProtectedPathRule(protectedPath, 'recursive', 'legacy recursive protected path', 'legacy-recursive')) + } + + return rules +} + +export function createProtectedDeletionGuard( + options: ProtectedDeletionGuardOptions = {} +): ProtectedDeletionGuard { + const includeReservedWorkspaceContentRoots = options.includeReservedWorkspaceContentRoots ?? true + const rules: ProtectedPathRule[] = [ + ...collectBuiltInDangerousPathRules(), + ...collectLegacyCompatibilityRules(options), + ...options.workspaceDir != null + ? collectWorkspaceReservedRules( + options.workspaceDir, + options.projectRoots ?? [], + includeReservedWorkspaceContentRoots + ) + : [], + ...options.aindexDir != null ? collectResolvedAindexRules(options.aindexDir) : [], + ...options.rules ?? [] + ] + const compiledRules = dedupeAndCompileRules(expandProtectedPathRules(rules)) + + return { + rules: compiledRules.map(rule => ({ + path: rule.path, + protectionMode: rule.protectionMode, + reason: rule.reason, + source: rule.source, + ...rule.matcher != null ? 
{matcher: rule.matcher} : {} + })), + exactProtectedPaths: compiledRules + .filter(rule => rule.protectionMode === 'direct') + .map(rule => rule.path), + subtreeProtectedPaths: compiledRules + .filter(rule => rule.protectionMode === 'recursive') + .map(rule => rule.path), + compiledRules + } +} + +export function collectProjectRoots(collectedOutputContext: OutputCollectedContext): string[] { + const projectRoots = new Set() + + for (const project of collectedOutputContext.workspace.projects) { + const absolutePath = project.dirFromWorkspacePath?.getAbsolutePath?.() + if (absolutePath != null && absolutePath.length > 0) projectRoots.add(resolveAbsolutePath(absolutePath)) + } + + return [...projectRoots] +} + +function selectMoreSpecificRule( + candidate: CompiledProtectedPathRule, + current: CompiledProtectedPathRule | undefined +): CompiledProtectedPathRule { + if (current == null) return candidate + if (candidate.specificity !== current.specificity) return candidate.specificity > current.specificity ? candidate : current + if (candidate.protectionMode !== current.protectionMode) return candidate.protectionMode === 'recursive' ? candidate : current + return candidate.path.localeCompare(current.path) < 0 ? 
candidate : current +} + +export function getProtectedPathViolation( + targetPath: string, + guard: ProtectedDeletionGuard +): ProtectedPathViolation | undefined { + const absoluteTargetPath = resolveAbsolutePath(targetPath) + const targetKeys = buildComparisonKeys(absoluteTargetPath) + let matchedRule: CompiledProtectedPathRule | undefined + + for (const rule of guard.compiledRules) { + let didMatch = false + + for (const targetKey of targetKeys) { + for (const ruleKey of rule.comparisonKeys) { + if (!isRuleMatch(targetKey, ruleKey, rule.protectionMode)) continue + matchedRule = selectMoreSpecificRule(rule, matchedRule) + didMatch = true + break + } + + if (didMatch) break + } + } + + if (matchedRule == null) return void 0 + + return { + targetPath: absoluteTargetPath, + protectedPath: matchedRule.path, + protectionMode: matchedRule.protectionMode, + reason: matchedRule.reason, + source: matchedRule.source + } +} + +export function partitionDeletionTargets( + targetPaths: readonly string[], + guard: ProtectedDeletionGuard +): {safePaths: string[], violations: ProtectedPathViolation[]} { + const safePaths: string[] = [] + const violationsByTargetPath = new Map() + + for (const targetPath of targetPaths) { + const absoluteTargetPath = resolveAbsolutePath(targetPath) + const violation = getProtectedPathViolation(absoluteTargetPath, guard) + if (violation == null) { + safePaths.push(absoluteTargetPath) + continue + } + + if (!violationsByTargetPath.has(violation.targetPath)) violationsByTargetPath.set(violation.targetPath, violation) + } + + return { + safePaths, + violations: [...violationsByTargetPath.values()].sort((a, b) => a.targetPath.localeCompare(b.targetPath)) + } +} + +export function buildProtectedDeletionGuardMessage( + operation: string, + violations: readonly ProtectedPathViolation[] +): string { + const pathList = violations.map(violation => violation.targetPath).join(', ') + return `Protected deletion guard blocked ${operation} for ${violations.length} 
path(s): ${pathList}` +} + +export function logProtectedDeletionGuardError( + logger: {error: (message: string, meta?: object) => void}, + operation: string, + violations: readonly ProtectedPathViolation[] +): void { + logger.error('protected deletion guard triggered', { + operation, + count: violations.length, + violations: violations.map(violation => ({ + targetPath: violation.targetPath, + protectedPath: violation.protectedPath, + protectionMode: violation.protectionMode, + source: violation.source, + reason: violation.reason + })) + }) +} diff --git a/cli/src/bridge/node.rs b/cli/src/bridge/node.rs index 7fcf92e5..4d7b85ff 100644 --- a/cli/src/bridge/node.rs +++ b/cli/src/bridge/node.rs @@ -280,7 +280,7 @@ pub fn run_node_command_captured( let stdout = String::from_utf8_lossy(&output.stdout).to_string(); let stderr = String::from_utf8_lossy(&output.stderr).to_string(); - if output.status.success() { + if output.status.success() || (json_mode && !stdout.trim().is_empty()) { Ok(BridgeCommandResult { stdout, stderr, exit_code }) } else { Err(CliError::NodeProcessFailed { code: exit_code, stderr }) diff --git a/cli/src/cli.rs b/cli/src/cli.rs index 29e2fdde..7a4abab5 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -1,6 +1,6 @@ //! CLI argument parsing using clap derive API. //! -//! Mirrors the TS `PluginPipeline.parseArgs()` + `resolveCommand()` + `resolveLogLevel()`. +//! Mirrors the TS `PluginPipeline.parseArgs()` + `resolveCommand()`. 
use clap::{Parser, Subcommand, Args}; @@ -49,9 +49,6 @@ pub enum CliCommand { /// Show version information Version, - /// Check if CLI version is outdated against npm registry - Outdated, - /// Preview changes without writing files #[command(name = "dry-run")] DryRun, @@ -154,7 +151,6 @@ pub fn resolve_log_level(cli: &Cli) -> Option { pub enum ResolvedCommand { Help, Version, - Outdated, Execute, DryRun, Clean, @@ -193,7 +189,6 @@ pub fn resolve_command(cli: &Cli) -> ResolvedCommand { None => ResolvedCommand::Execute, Some(CliCommand::Help) => ResolvedCommand::Help, Some(CliCommand::Version) => ResolvedCommand::Version, - Some(CliCommand::Outdated) => ResolvedCommand::Outdated, Some(CliCommand::DryRun) => ResolvedCommand::DryRun, Some(CliCommand::Clean(args)) => { if args.dry_run { @@ -245,12 +240,6 @@ mod tests { assert_eq!(resolve_command(&cli), ResolvedCommand::Version); } - #[test] - fn test_outdated_subcommand() { - let cli = parse(&["tnmsc", "outdated"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::Outdated); - } - #[test] fn test_dry_run_subcommand() { let cli = parse(&["tnmsc", "dry-run"]); diff --git a/cli/src/commands/CleanCommand.ts b/cli/src/commands/CleanCommand.ts index dfd36335..bb8be0a8 100644 --- a/cli/src/commands/CleanCommand.ts +++ b/cli/src/commands/CleanCommand.ts @@ -14,6 +14,15 @@ export class CleanCommand implements Command { const cleanCtx = createCleanContext(false) const result = await performCleanup(outputPlugins, cleanCtx, logger) + if (result.violations.length > 0 || result.conflicts.length > 0) { + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + ...result.message != null ? 
{message: result.message} : {} + } + } + logger.info('clean complete', {deletedFiles: result.deletedFiles, deletedDirs: result.deletedDirs}) return { diff --git a/cli/src/commands/CleanupUtils.test.ts b/cli/src/commands/CleanupUtils.test.ts new file mode 100644 index 00000000..113fcf8c --- /dev/null +++ b/cli/src/commands/CleanupUtils.test.ts @@ -0,0 +1,533 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import { + FilePathKind, + IDEKind, + PluginKind +} from '../plugins/plugin-core' +import {collectDeletionTargets} from './CleanupUtils' + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} + } as ILogger +} + +function createCleanContext(overrides?: Partial): OutputCleanContext { + return { + logger: createMockLogger(), + fs, + path, + glob, + dryRun: true, + pluginOptions: {cleanupProtection: {}}, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Relative, + path: '.', + basePath: '.', + getDirectoryName: () => '.', + getAbsolutePath: () => path.resolve('.') + }, + projects: [] + }, + ...overrides + } + } as OutputCleanContext +} + +function createMockOutputPlugin(name: string, outputs: readonly string[], cleanup?: OutputCleanupDeclarations): OutputPlugin { + return { + type: PluginKind.Output, + name, + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return outputs.map(output => ({path: output, source: {}})) + }, + async declareCleanupPaths() { + return cleanup ?? 
{} + }, + async convertContent() { + return '' + } + } +} + +describe('collectDeletionTargets', () => { + it('throws when an output path matches a protected input source file', async () => { + const editorSource = path.resolve('tmp-aindex/public/.editorconfig') + const ignoreSource = path.resolve('tmp-aindex/public/.cursorignore') + + const ctx = createCleanContext({ + editorConfigFiles: [{ + type: IDEKind.EditorConfig, + content: 'root = true', + length: 11, + filePathKind: FilePathKind.Absolute, + dir: { + pathKind: FilePathKind.Absolute, + path: editorSource, + getDirectoryName: () => '.editorconfig' + } + }], + aiAgentIgnoreConfigFiles: [{ + fileName: '.cursorignore', + content: 'node_modules', + sourcePath: ignoreSource + }] + }) + + const plugin = createMockOutputPlugin('MockOutputPlugin', [editorSource, ignoreSource]) + + await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') + }) + + it('keeps non-overlapping output paths for cleanup', async () => { + const outputA = path.resolve('tmp-out/a.md') + const outputB = path.resolve('tmp-out/b.md') + const ctx = createCleanContext() + const plugin = createMockOutputPlugin('MockOutputPlugin', [outputA, outputB]) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(new Set(result.filesToDelete)).toEqual(new Set([outputA, outputB])) + expect(result.violations).toEqual([]) + }) + + it('throws when an output path matches a known aindex protected config file', async () => { + const aindexDir = path.resolve('tmp-aindex') + const editorConfigOutput = path.resolve(aindexDir, 'public', '.editorconfig') + const ctx = createCleanContext({aindexDir}) + const plugin = createMockOutputPlugin('MockOutputPlugin', [editorConfigOutput]) + + await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') + }) + + it('compacts nested delete targets to reduce IO', async () => { + const claudeBaseDir = path.resolve('tmp-out/.claude') + 
const ruleDir = path.join(claudeBaseDir, 'rules') + const ruleFile = path.join(ruleDir, 'a.md') + const ctx = createCleanContext() + const plugin = createMockOutputPlugin( + 'MockOutputPlugin', + [ruleFile], + { + delete: [ + {kind: 'directory', path: claudeBaseDir}, + {kind: 'directory', path: ruleDir}, + {kind: 'file', path: ruleFile} + ] + } + ) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([claudeBaseDir]) + expect(result.filesToDelete).toEqual([]) + }) + + it('skips parent deletion when a protected child path exists', async () => { + const codexBaseDir = path.resolve('tmp-out/.codex') + const promptsDir = path.join(codexBaseDir, 'prompts') + const protectedSystemDir = path.join(codexBaseDir, 'skills', '.system') + const ctx = createCleanContext() + const plugin = createMockOutputPlugin( + 'MockOutputPlugin', + [], + { + delete: [ + {kind: 'directory', path: codexBaseDir}, + {kind: 'directory', path: promptsDir} + ], + protect: [ + {kind: 'directory', path: protectedSystemDir} + ] + } + ) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([promptsDir]) + expect(result.violations.map(violation => violation.targetPath)).toEqual([codexBaseDir]) + }) + + it('blocks deleting dangerous roots and returns the most specific matching rule', async () => { + const homeDir = os.homedir() + const ctx = createCleanContext() + const plugin = createMockOutputPlugin( + 'MockOutputPlugin', + [], + { + delete: [{kind: 'directory', path: homeDir}] + } + ) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([]) + expect(result.filesToDelete).toEqual([]) + expect(result.violations).toEqual([expect.objectContaining({ + targetPath: path.resolve(homeDir), + protectedPath: path.resolve('knowladge'), + protectionMode: 'direct' + })]) + }) + + it('throws when an output path matches a built-in protected path before directory guards 
run', async () => { + const workspaceDir = path.resolve('tmp-workspace-root') + const projectRoot = path.join(workspaceDir, 'project-a') + const aindexDir = path.join(workspaceDir, 'aindex') + const globalAindexDir = path.join(os.homedir(), '.aindex') + const globalConfigPath = path.join(globalAindexDir, '.tnmsc.json') + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [{ + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => projectRoot + } + }] + }, + aindexDir + }) + const plugin = createMockOutputPlugin( + 'MockOutputPlugin', + [globalConfigPath], + { + delete: [ + {kind: 'directory', path: globalAindexDir}, + {kind: 'directory', path: workspaceDir}, + {kind: 'directory', path: projectRoot}, + {kind: 'directory', path: aindexDir} + ] + } + ) + + await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow(`Cleanup protection conflict: 1 output path(s) are also protected: ${path.resolve(globalConfigPath)}`) + }) + + it('allows deleting non-mdx files under dist while blocking reserved dist mdx files', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-dist-mdx-')) + const workspaceDir = path.join(tempDir, 'workspace') + const distCommandDir = path.join(workspaceDir, 'aindex', 'dist', 'commands') + const projectChildFile = path.join(workspaceDir, 'project-a', 'AGENTS.md') + const protectedDistMdxFile = path.join(distCommandDir, 'demo.mdx') + const safeDistMarkdownFile = path.join(distCommandDir, 'README.md') + const globalChildDir = path.join(os.homedir(), '.aindex', '.codex', 'prompts') + const aindexSourceDir = path.join(workspaceDir, 'aindex', 'commands') + + fs.mkdirSync(path.dirname(projectChildFile), {recursive: true}) + 
fs.mkdirSync(distCommandDir, {recursive: true}) + fs.mkdirSync(aindexSourceDir, {recursive: true}) + fs.writeFileSync(projectChildFile, '# agent', 'utf8') + fs.writeFileSync(protectedDistMdxFile, '# compiled', 'utf8') + fs.writeFileSync(safeDistMarkdownFile, '# doc', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [{ + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + } + }] + }, + aindexDir: path.join(workspaceDir, 'aindex') + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [ + projectChildFile, + safeDistMarkdownFile + ], { + delete: [ + {kind: 'file', path: protectedDistMdxFile}, + {kind: 'directory', path: globalChildDir}, + {kind: 'directory', path: aindexSourceDir} + ] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(new Set(result.filesToDelete)).toEqual(new Set([ + path.resolve(projectChildFile), + path.resolve(safeDistMarkdownFile) + ])) + expect(new Set(result.dirsToDelete)).toEqual(new Set([path.resolve(globalChildDir), path.resolve(aindexSourceDir)])) + expect(result.violations).toEqual([expect.objectContaining({ + targetPath: path.resolve(protectedDistMdxFile), + protectionMode: 'direct', + protectedPath: path.resolve(protectedDistMdxFile) + })]) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('blocks deleting a dist directory when protected mdx descendants exist', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-dist-dir-')) + const workspaceDir = path.join(tempDir, 'workspace') + const distCommandDir = path.join(workspaceDir, 'aindex', 'dist', 'commands') + const 
protectedDistMdxFile = path.join(distCommandDir, 'demo.mdx') + + fs.mkdirSync(distCommandDir, {recursive: true}) + fs.writeFileSync(protectedDistMdxFile, '# compiled', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + }, + aindexDir: path.join(workspaceDir, 'aindex') + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [{kind: 'directory', path: distCommandDir}] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([]) + expect(result.filesToDelete).toEqual([]) + expect(result.violations).toEqual([expect.objectContaining({ + targetPath: path.resolve(distCommandDir), + protectionMode: 'direct', + protectedPath: path.resolve(protectedDistMdxFile) + })]) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('allows deleting non-mdx files under app while blocking reserved app mdx files', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-app-mdx-')) + const workspaceDir = path.join(tempDir, 'workspace') + const appDir = path.join(workspaceDir, 'aindex', 'app') + const protectedAppMdxFile = path.join(appDir, 'guide.mdx') + const safeAppMarkdownFile = path.join(appDir, 'README.md') + + fs.mkdirSync(appDir, {recursive: true}) + fs.writeFileSync(protectedAppMdxFile, '# app guide', 'utf8') + fs.writeFileSync(safeAppMarkdownFile, '# readme', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + }, + aindexDir: path.join(workspaceDir, 'aindex') + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [safeAppMarkdownFile], { 
+ delete: [{kind: 'file', path: protectedAppMdxFile}] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.filesToDelete).toEqual([path.resolve(safeAppMarkdownFile)]) + expect(result.violations).toEqual([expect.objectContaining({ + targetPath: path.resolve(protectedAppMdxFile), + protectionMode: 'direct', + protectedPath: path.resolve(protectedAppMdxFile) + })]) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('throws when an output file path exactly matches a cleanup protect declaration', async () => { + const outputPath = path.resolve('tmp-out/protected.md') + const ctx = createCleanContext() + const plugin = createMockOutputPlugin('MockOutputPlugin', [outputPath], { + protect: [{kind: 'file', path: outputPath}] + }) + + await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') + }) + + it('blocks deleting an app directory when protected mdx descendants exist', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-app-dir-')) + const workspaceDir = path.join(tempDir, 'workspace') + const appSubDir = path.join(workspaceDir, 'aindex', 'app', 'nested') + const protectedAppMdxFile = path.join(appSubDir, 'guide.mdx') + + fs.mkdirSync(appSubDir, {recursive: true}) + fs.writeFileSync(protectedAppMdxFile, '# app guide', 'utf8') + + try { + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + }, + aindexDir: path.join(workspaceDir, 'aindex') + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [{kind: 'directory', path: path.join(workspaceDir, 'aindex', 'app')}] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([]) + expect(result.filesToDelete).toEqual([]) + 
expect(result.violations).toEqual([expect.objectContaining({ + targetPath: path.resolve(path.join(workspaceDir, 'aindex', 'app')), + protectionMode: 'direct', + protectedPath: path.resolve(protectedAppMdxFile) + })]) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('blocks symlink targets that resolve to a protected path and keeps the most specific match', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-guard-')) + const workspaceDir = path.join(tempDir, 'workspace') + const symlinkPath = path.join(tempDir, 'workspace-link') + + fs.mkdirSync(workspaceDir, {recursive: true}) + + try { + const symlinkType: 'junction' | 'dir' = process.platform === 'win32' ? 'junction' : 'dir' + fs.symlinkSync(workspaceDir, symlinkPath, symlinkType) + + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [{kind: 'directory', path: symlinkPath}] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.dirsToDelete).toEqual([]) + expect(result.violations).toEqual([expect.objectContaining({ + targetPath: path.resolve(symlinkPath), + protectedPath: path.resolve(path.join(workspaceDir, 'knowladge')), + protectionMode: 'direct' + })]) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) + + it('lets direct protect declarations keep descendants deletable while recursive protect declarations block them', async () => { + const workspaceDir = path.resolve('tmp-direct-vs-recursive') + const directProtectedDir = path.join(workspaceDir, 'project-a') + const recursiveProtectedDir = path.join(workspaceDir, 'aindex', 'dist') + const directChildFile = path.join(directProtectedDir, 'AGENTS.md') + const 
recursiveChildFile = path.join(recursiveProtectedDir, 'commands', 'demo.mdx') + const ctx = createCleanContext({ + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + }) + const plugin = createMockOutputPlugin('MockOutputPlugin', [directChildFile, recursiveChildFile], { + protect: [ + {kind: 'directory', path: directProtectedDir, protectionMode: 'direct'}, + {kind: 'directory', path: recursiveProtectedDir, protectionMode: 'recursive'} + ] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + expect(result.filesToDelete).toEqual([path.resolve(directChildFile)]) + expect(result.violations).toEqual([expect.objectContaining({ + targetPath: path.resolve(recursiveChildFile), + protectionMode: 'recursive', + protectedPath: path.resolve(recursiveProtectedDir) + })]) + }) + + it('skips delete glob matches covered by excludeScanGlobs while still deleting other sibling directories', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-exclude-glob-')) + const skillsDir = path.join(tempDir, '.cursor', 'skills-cursor') + const preservedDir = path.join(skillsDir, 'create-rule') + const staleDir = path.join(skillsDir, 'legacy-skill') + + fs.mkdirSync(preservedDir, {recursive: true}) + fs.mkdirSync(staleDir, {recursive: true}) + fs.writeFileSync(path.join(preservedDir, 'SKILL.md'), '# preserved', 'utf8') + fs.writeFileSync(path.join(staleDir, 'SKILL.md'), '# stale', 'utf8') + + try { + const ctx = createCleanContext() + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [{kind: 'glob', path: path.join(skillsDir, '*')}], + protect: [{kind: 'directory', path: preservedDir}], + excludeScanGlobs: [preservedDir, path.join(preservedDir, '**')] + }) + + const result = await collectDeletionTargets([plugin], ctx) + + 
expect(result.dirsToDelete).toEqual([path.resolve(staleDir)]) + expect(result.filesToDelete).toEqual([]) + expect(result.violations).toEqual([]) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/commands/CleanupUtils.ts b/cli/src/commands/CleanupUtils.ts index d504440f..392a021e 100644 --- a/cli/src/commands/CleanupUtils.ts +++ b/cli/src/commands/CleanupUtils.ts @@ -1,7 +1,20 @@ -import type {ILogger, OutputCleanContext, OutputPlugin} from '../plugins/plugin-shared' +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputCleanupPathDeclaration, OutputPlugin} from '../plugins/plugin-core' +import type {ProtectedPathRule, ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard' +import * as fs from 'node:fs' import * as path from 'node:path' import {deleteDirectories as deskDeleteDirectories, deleteFiles as deskDeleteFiles} from '../plugins/desk-paths' -import {checkCanClean, collectAllPluginOutputs, executeOnCleanComplete} from '../plugins/plugin-shared' +import { + collectAllPluginOutputs +} from '../plugins/plugin-core' +import { + buildComparisonKeys, + collectProjectRoots, + collectProtectedInputSourceRules, + createProtectedDeletionGuard, + logProtectedDeletionGuardError, + partitionDeletionTargets, + resolveAbsolutePath +} from '../ProtectedDeletionGuard' /** * Result of cleanup operation @@ -10,6 +23,9 @@ export interface CleanupResult { readonly deletedFiles: number readonly deletedDirs: number readonly errors: readonly CleanupError[] + readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[] + readonly conflicts: readonly CleanupProtectionConflict[] + readonly message?: string } /** @@ -21,47 +37,329 @@ export interface CleanupError { readonly error: unknown } -/** - * Options for cleanup operation - */ -export interface CleanupOptions { - readonly executeHooks?: boolean +export interface CleanupProtectionConflict { + readonly 
outputPath: string + readonly outputPlugin: string + readonly protectedPath: string + readonly protectionMode: ProtectionMode + readonly protectedBy: string + readonly reason: string +} + +export class CleanupProtectionConflictError extends Error { + readonly conflicts: readonly CleanupProtectionConflict[] + + constructor(conflicts: readonly CleanupProtectionConflict[]) { + super(buildCleanupProtectionConflictMessage(conflicts)) + this.name = 'CleanupProtectionConflictError' + this.conflicts = conflicts + } +} + +interface CleanupTargetCollections { + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[] + readonly conflicts: readonly CleanupProtectionConflict[] + readonly excludedScanGlobs: string[] +} + +const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS = [ + '**/node_modules/**', + '**/.git/**', + '**/.turbo/**', + '**/.pnpm-store/**', + '**/.yarn/**', + '**/.next/**' +] as const + +function normalizeGlobPattern(pattern: string): string { + return resolveAbsolutePath(pattern).replaceAll('\\', '/') +} + +function stripTrailingSeparator(rawPath: string): string { + const {root} = path.parse(rawPath) + if (rawPath === root) return rawPath + return rawPath.endsWith(path.sep) ? 
rawPath.slice(0, -1) : rawPath +} + +function isSameOrChildPath(candidate: string, parent: string): boolean { + const normalizedCandidate = stripTrailingSeparator(candidate) + const normalizedParent = stripTrailingSeparator(parent) + if (normalizedCandidate === normalizedParent) return true + return normalizedCandidate.startsWith(`${normalizedParent}${path.sep}`) +} + +function expandCleanupGlob( + pattern: string, + cleanCtx: OutputCleanContext, + ignoreGlobs: readonly string[] +): readonly string[] { + const normalizedPattern = normalizeGlobPattern(pattern) + return cleanCtx.glob.sync(normalizedPattern, { + onlyFiles: false, + dot: true, + absolute: true, + followSymbolicLinks: false, + ignore: [...ignoreGlobs] + }) +} + +async function collectPluginCleanupDeclarations( + plugin: OutputPlugin, + cleanCtx: OutputCleanContext +): Promise { + if (plugin.declareCleanupPaths == null) return {} + return plugin.declareCleanupPaths({...cleanCtx, dryRun: true}) +} + +function compactDeletionTargets( + filesByKey: Map, + dirsByKey: Map +): {files: string[], dirs: string[]} { + const compactedDirs = new Map() + const sortedDirEntries = [...dirsByKey.entries()].sort((a, b) => a[0].length - b[0].length) + + for (const [dirKey, dirPath] of sortedDirEntries) { + let coveredByParent = false + for (const existingParentKey of compactedDirs.keys()) { + if (isSameOrChildPath(dirKey, existingParentKey)) { + coveredByParent = true + break + } + } + if (!coveredByParent) compactedDirs.set(dirKey, dirPath) + } + + const compactedFiles: string[] = [] + for (const [fileKey, filePath] of filesByKey) { + let coveredByDir = false + for (const dirKey of compactedDirs.keys()) { + if (isSameOrChildPath(fileKey, dirKey)) { + coveredByDir = true + break + } + } + if (!coveredByDir) compactedFiles.push(filePath) + } + + compactedFiles.sort((a, b) => a.localeCompare(b)) + const compactedDirPaths = [...compactedDirs.values()].sort((a, b) => a.localeCompare(b)) + return {files: compactedFiles, dirs: 
compactedDirPaths} +} + +function buildCleanupProtectionConflictMessage(conflicts: readonly CleanupProtectionConflict[]): string { + const pathList = conflicts.map(conflict => conflict.outputPath).join(', ') + return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}` +} + +function detectCleanupProtectionConflicts( + outputPathOwners: ReadonlyMap, + guard: ReturnType +): CleanupProtectionConflict[] { + const conflicts: CleanupProtectionConflict[] = [] + + for (const [outputPath, outputPlugins] of outputPathOwners.entries()) { + const outputKeys = new Set(buildComparisonKeys(outputPath)) + + for (const rule of guard.compiledRules) { + const isExactMatch = rule.comparisonKeys.some(ruleKey => outputKeys.has(ruleKey)) + if (!isExactMatch) continue + + for (const outputPlugin of outputPlugins) { + conflicts.push({ + outputPath, + outputPlugin, + protectedPath: rule.path, + protectionMode: rule.protectionMode, + protectedBy: rule.source, + reason: rule.reason + }) + } + } + } + + return conflicts.sort((a, b) => { + const pathDiff = a.outputPath.localeCompare(b.outputPath) + if (pathDiff !== 0) return pathDiff + return a.protectedPath.localeCompare(b.protectedPath) + }) +} + +function logCleanupProtectionConflicts( + logger: ILogger, + conflicts: readonly CleanupProtectionConflict[] +): void { + logger.error('cleanup protection conflict detected', { + count: conflicts.length, + conflicts: conflicts.map(conflict => ({ + outputPath: conflict.outputPath, + outputPlugin: conflict.outputPlugin, + protectedPath: conflict.protectedPath, + protectionMode: conflict.protectionMode, + protectedBy: conflict.protectedBy, + reason: conflict.reason + })) + }) } /** - * Collect deletion targets from enabled output plugins + * Collect deletion targets from enabled output plugins. 
*/ export async function collectDeletionTargets( outputPlugins: readonly OutputPlugin[], - permissions: Map, cleanCtx: OutputCleanContext -): Promise<{filesToDelete: string[], dirsToDelete: string[]}> { - const filesToDelete: string[] = [] - const dirsToDelete: string[] = [] +): Promise<{ + filesToDelete: string[] + dirsToDelete: string[] + violations: import('../ProtectedDeletionGuard').ProtectedPathViolation[] + conflicts: CleanupProtectionConflict[] + excludedScanGlobs: string[] +}> { + const deleteFiles = new Set() + const deleteDirs = new Set() + const protectedRules = new Map() + const excludeScanGlobSet = new Set(DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS) + const outputPathOwners = new Map() + + const pluginSnapshots: { + readonly plugin: OutputPlugin + readonly cleanup: OutputCleanupDeclarations + }[] = [] + + const addDeletePath = (rawPath: string, kind: 'file' | 'directory'): void => { + if (kind === 'directory') deleteDirs.add(resolveAbsolutePath(rawPath)) + else deleteFiles.add(resolveAbsolutePath(rawPath)) + } + + const addProtectRule = ( + rawPath: string, + protectionMode: ProtectionMode, + reason: string, + source: string, + matcher: ProtectionRuleMatcher = 'path' + ): void => { + const resolvedPath = resolveAbsolutePath(rawPath) + protectedRules.set(`${matcher}:${protectionMode}:${resolvedPath}`, { + path: resolvedPath, + protectionMode, + reason, + source, + matcher + }) + } + + const defaultProtectionModeForTarget = (target: OutputCleanupPathDeclaration): ProtectionMode => { + if (target.protectionMode != null) return target.protectionMode + return target.kind === 'file' ? 'direct' : 'recursive' + } + + for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source) + + for (const rule of cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []) { + addProtectRule( + rule.path, + rule.protectionMode, + rule.reason ?? 
'configured cleanup protection rule', + 'configured-cleanup-protection', + rule.matcher ?? 'path' + ) + } for (const plugin of outputPlugins) { - const perm = permissions.get(plugin.name) - if (perm?.project) { - const projectFiles = await plugin.registerProjectOutputFiles?.(cleanCtx) ?? [] - const projectDirs = await plugin.registerProjectOutputDirs?.(cleanCtx) ?? [] - filesToDelete.push(...projectFiles.map(f => f.getAbsolutePath())) - dirsToDelete.push(...projectDirs.map(d => d.getAbsolutePath())) + const declarations = await plugin.declareOutputFiles({...cleanCtx, dryRun: true}) + for (const declaration of declarations) { + const resolvedOutputPath = resolveAbsolutePath(declaration.path) + addDeletePath(resolvedOutputPath, 'file') + const existingOwners = outputPathOwners.get(resolvedOutputPath) + if (existingOwners == null) outputPathOwners.set(resolvedOutputPath, [plugin.name]) + else if (!existingOwners.includes(plugin.name)) existingOwners.push(plugin.name) + } + + const cleanupDeclarations = await collectPluginCleanupDeclarations(plugin, cleanCtx) + for (const ignoreGlob of cleanupDeclarations.excludeScanGlobs ?? []) excludeScanGlobSet.add(normalizeGlobPattern(ignoreGlob)) + pluginSnapshots.push({plugin, cleanup: cleanupDeclarations}) + } + + const excludeScanGlobs = [...excludeScanGlobSet] + + const resolveDeleteGlob = (target: OutputCleanupPathDeclaration): void => { + for (const matchedPath of expandCleanupGlob(target.path, cleanCtx, excludeScanGlobs)) { + try { + const stat = fs.lstatSync(matchedPath) + if (stat.isDirectory()) addDeletePath(matchedPath, 'directory') + else addDeletePath(matchedPath, 'file') + } + catch {} + } + } + + const resolveProtectGlob = (target: OutputCleanupPathDeclaration, pluginName: string): void => { + const protectionMode = defaultProtectionModeForTarget(target) + const reason = target.label != null + ? 
`plugin cleanup protect declaration (${target.label})` + : 'plugin cleanup protect declaration' + + for (const matchedPath of expandCleanupGlob(target.path, cleanCtx, excludeScanGlobs)) { + addProtectRule(matchedPath, protectionMode, reason, `plugin-cleanup-protect:${pluginName}`) + } + } + + for (const {plugin, cleanup} of pluginSnapshots) { + for (const target of cleanup.protect ?? []) { + if (target.kind === 'glob') { + resolveProtectGlob(target, plugin.name) + continue + } + addProtectRule( + target.path, + defaultProtectionModeForTarget(target), + target.label != null ? `plugin cleanup protect declaration (${target.label})` : 'plugin cleanup protect declaration', + `plugin-cleanup-protect:${plugin.name}` + ) } - if (perm?.global) { - const globalFiles = await plugin.registerGlobalOutputFiles?.(cleanCtx) ?? [] - const globalDirs = await plugin.registerGlobalOutputDirs?.(cleanCtx) ?? [] - filesToDelete.push(...globalFiles.map(f => f.getAbsolutePath())) - dirsToDelete.push(...globalDirs.map(d => d.getAbsolutePath())) + + for (const target of cleanup.delete ?? []) { + if (target.kind === 'glob') { + resolveDeleteGlob(target) + continue + } + if (target.kind === 'directory') addDeletePath(target.path, 'directory') + else addDeletePath(target.path, 'file') } } - return {filesToDelete, dirsToDelete} + const guard = createProtectedDeletionGuard({ + workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, + projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext), + rules: [...protectedRules.values()], + ...cleanCtx.collectedOutputContext.aindexDir != null + ? 
{aindexDir: cleanCtx.collectedOutputContext.aindexDir} + : {} + }) + const conflicts = detectCleanupProtectionConflicts(outputPathOwners, guard) + if (conflicts.length > 0) throw new CleanupProtectionConflictError(conflicts) + const filePartition = partitionDeletionTargets([...deleteFiles], guard) + const dirPartition = partitionDeletionTargets([...deleteDirs], guard) + + const compactedTargets = compactDeletionTargets( + new Map(filePartition.safePaths.map(filePath => [filePath, filePath])), + new Map(dirPartition.safePaths.map(dirPath => [dirPath, dirPath])) + ) + + return { + filesToDelete: compactedTargets.files, + dirsToDelete: compactedTargets.dirs, + violations: [...filePartition.violations, ...dirPartition.violations].sort((a, b) => a.targetPath.localeCompare(b.targetPath)), + conflicts: [], + excludedScanGlobs: [...excludeScanGlobSet].sort((a, b) => a.localeCompare(b)) + } } /** - * Delete files with error handling - * Logs warnings for failed deletions and continues with remaining files - * Uses deletePathSync from @truenine/desk-paths for cross-platform safe deletion + * Delete files with error handling. + * Logs warnings for failed deletions and continues with remaining files. + * Uses deletePathSync from @truenine/desk-paths for cross-platform safe deletion. */ export function deleteFiles(files: string[], logger: ILogger): {deleted: number, errors: CleanupError[]} { const resolved = files.map(f => path.isAbsolute(f) ? f : path.resolve(f)) @@ -80,9 +378,9 @@ export function deleteFiles(files: string[], logger: ILogger): {deleted: number, } /** - * Delete directories with error handling - * Sorts by length descending to handle nested dirs properly - * Logs warnings for failed deletions and continues with remaining directories + * Delete directories with error handling. + * Sorts by length descending to handle nested dirs properly. + * Logs warnings for failed deletions and continues with remaining directories. 
*/ export function deleteDirectories(dirs: string[], logger: ILogger): {deleted: number, errors: CleanupError[]} { const resolved = dirs.map(d => path.isAbsolute(d) ? d : path.resolve(d)) @@ -100,49 +398,86 @@ export function deleteDirectories(dirs: string[], logger: ILogger): {deleted: nu return {deleted: result.deleted, errors} } +function logCleanupPlanDiagnostics( + logger: ILogger, + targets: CleanupTargetCollections +): void { + logger.debug('cleanup plan built', { + filesToDelete: targets.filesToDelete.length, + dirsToDelete: targets.dirsToDelete.length, + violations: targets.violations.length, + conflicts: targets.conflicts.length, + excludedScanGlobs: targets.excludedScanGlobs + }) +} + /** - * Perform cleanup operation for output plugins + * Perform cleanup operation for output plugins. * This is the main reusable cleanup function that can be called from both - * CleanCommand and ExecuteCommand (for pre-cleanup) - * - * @param outputPlugins - Output plugins to clean - * @param cleanCtx - Clean context - * @param logger - Logger instance - * @param options - Cleanup options - * @returns Cleanup result with counts and errors + * CleanCommand and ExecuteCommand (for pre-cleanup). */ export async function performCleanup( outputPlugins: readonly OutputPlugin[], cleanCtx: OutputCleanContext, - logger: ILogger, - options?: CleanupOptions + logger: ILogger ): Promise { - const {executeHooks = true} = options ?? 
{} - const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx) // Collect outputs for logging logger.debug('Collected outputs for cleanup', { projectDirs: outputs.projectDirs.length, projectFiles: outputs.projectFiles.length, + workspaceDirs: outputs.workspaceDirs.length, + workspaceFiles: outputs.workspaceFiles.length, globalDirs: outputs.globalDirs.length, globalFiles: outputs.globalFiles.length }) - const permissions = await checkCanClean(outputPlugins, cleanCtx) // Check permissions - - const {filesToDelete, dirsToDelete} = await collectDeletionTargets( // Collect deletion targets - outputPlugins, - permissions, - cleanCtx - ) + let targets: Awaited> + try { + targets = await collectDeletionTargets(outputPlugins, cleanCtx) + } + catch (error) { + if (error instanceof CleanupProtectionConflictError) { + logCleanupProtectionConflicts(logger, error.conflicts) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: [], + conflicts: error.conflicts, + message: error.message + } + } + throw error + } + const cleanupTargets: CleanupTargetCollections = { + filesToDelete: targets.filesToDelete, + dirsToDelete: targets.dirsToDelete, + violations: targets.violations, + conflicts: targets.conflicts, + excludedScanGlobs: targets.excludedScanGlobs + } + logCleanupPlanDiagnostics(logger, cleanupTargets) - const fileResult = deleteFiles(filesToDelete, logger) // Perform deletions - const dirResult = deleteDirectories(dirsToDelete, logger) + if (cleanupTargets.violations.length > 0) { + logProtectedDeletionGuardError(logger, 'cleanup', cleanupTargets.violations) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: cleanupTargets.violations, + conflicts: [], + message: `Protected deletion guard blocked cleanup for ${cleanupTargets.violations.length} path(s)` + } + } - if (executeHooks) await executeOnCleanComplete(outputPlugins, cleanCtx) // Execute hooks if requested + const fileResult = 
deleteFiles(cleanupTargets.filesToDelete, logger) + const dirResult = deleteDirectories(cleanupTargets.dirsToDelete, logger) return { deletedFiles: fileResult.deleted, deletedDirs: dirResult.deleted, - errors: [...fileResult.errors, ...dirResult.errors] + errors: [...fileResult.errors, ...dirResult.errors], + violations: [], + conflicts: [] } } diff --git a/cli/src/commands/Command.ts b/cli/src/commands/Command.ts index 4f8b14b3..4e6c00c7 100644 --- a/cli/src/commands/Command.ts +++ b/cli/src/commands/Command.ts @@ -1,4 +1,4 @@ -import type {CollectedInputContext, ILogger, OutputCleanContext, OutputPlugin, OutputWriteContext, PluginOptions, UserConfigFile} from '../plugins/plugin-shared' +import type {ILogger, OutputCleanContext, OutputCollectedContext, OutputPlugin, OutputWriteContext, PluginOptions, UserConfigFile} from '../plugins/plugin-core' /** * Command execution context @@ -6,7 +6,7 @@ import type {CollectedInputContext, ILogger, OutputCleanContext, OutputPlugin, O export interface CommandContext { readonly logger: ILogger readonly outputPlugins: readonly OutputPlugin[] - readonly collectedInputContext: CollectedInputContext + readonly collectedOutputContext: OutputCollectedContext readonly userConfigOptions: Required readonly createCleanContext: (dryRun: boolean) => OutputCleanContext readonly createWriteContext: (dryRun: boolean) => OutputWriteContext diff --git a/cli/src/commands/CommandFactory.ts b/cli/src/commands/CommandFactory.ts index 226468f0..3604485f 100644 --- a/cli/src/commands/CommandFactory.ts +++ b/cli/src/commands/CommandFactory.ts @@ -1,5 +1,5 @@ import type {Command} from './Command' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' /** * Command factory interface diff --git a/cli/src/commands/CommandRegistry.ts b/cli/src/commands/CommandRegistry.ts index 743607ee..91d16351 100644 --- a/cli/src/commands/CommandRegistry.ts +++ b/cli/src/commands/CommandRegistry.ts @@ -1,6 +1,6 
@@ import type {Command} from './Command' import type {CommandFactory, PrioritizedCommandFactory} from './CommandFactory' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {FactoryPriority} from './CommandFactory' /** @@ -25,11 +25,6 @@ export class CommandRegistry { this.factories.sort((a, b) => a.priority - b.priority) } - registerAll(factories: PrioritizedCommandFactory[]): void { - for (const factory of factories) this.factories.push(factory) - this.factories.sort((a, b) => a.priority - b.priority) // Sort by priority after all registrations - } - resolve(args: ParsedCliArgs): Command { for (const factory of this.factories) { // First pass: check prioritized factories (flags, unknown commands) if (factory.priority <= FactoryPriority.Unknown && factory.canHandle(args)) return factory.createCommand(args) diff --git a/cli/src/commands/CommandRegistryFactory.ts b/cli/src/commands/CommandRegistryFactory.ts deleted file mode 100644 index b54c340d..00000000 --- a/cli/src/commands/CommandRegistryFactory.ts +++ /dev/null @@ -1,35 +0,0 @@ -import {FactoryPriority} from './CommandFactory' -import {CommandRegistry} from './CommandRegistry' -import {CleanCommandFactory} from './factories/CleanCommandFactory' -import {ConfigCommandFactory} from './factories/ConfigCommandFactory' -import {DryRunCommandFactory} from './factories/DryRunCommandFactory' -import {ExecuteCommandFactory} from './factories/ExecuteCommandFactory' -import {HelpCommandFactory} from './factories/HelpCommandFactory' -import {OutdatedCommandFactory} from './factories/OutdatedCommandFactory' -import {PluginsCommandFactory} from './factories/PluginsCommandFactory' -import {UnknownCommandFactory} from './factories/UnknownCommandFactory' -import {VersionCommandFactory} from './factories/VersionCommandFactory' - -/** - * Create a default command registry with all standard factories pre-registered - * - * This is in a separate file to avoid 
circular dependencies between - * CommandRegistry -> Factories -> Commands -> index - */ -export function createDefaultCommandRegistry(): CommandRegistry { - const registry = new CommandRegistry() - - registry.register(new VersionCommandFactory()) // High priority: flag-based commands - registry.register(new HelpCommandFactory()) - registry.register(new UnknownCommandFactory()) - - registry.registerWithPriority(new OutdatedCommandFactory(), FactoryPriority.Subcommand) // Normal priority: subcommand-based commands - registry.registerWithPriority(new DryRunCommandFactory(), FactoryPriority.Subcommand) - registry.registerWithPriority(new CleanCommandFactory(), FactoryPriority.Subcommand) - registry.registerWithPriority(new PluginsCommandFactory(), FactoryPriority.Subcommand) - registry.registerWithPriority(new ConfigCommandFactory(), FactoryPriority.Subcommand) - - registry.registerWithPriority(new ExecuteCommandFactory(), FactoryPriority.Subcommand) // Lowest priority: default/catch-all command - - return registry -} diff --git a/cli/src/commands/CommandUtils.ts b/cli/src/commands/CommandUtils.ts index 17ca3adf..a1b522f9 100644 --- a/cli/src/commands/CommandUtils.ts +++ b/cli/src/commands/CommandUtils.ts @@ -1,24 +1,3 @@ -import type {OutputPlugin, OutputWriteContext} from '../plugins/plugin-shared' -import {checkCanWrite} from '../plugins/plugin-shared' - -/** - * Filter plugins based on write permissions. - * Returns only plugins that are allowed to write to the specified scope. 
- * - * @param plugins - All output plugins to filter - * @param ctx - Write context for permission checking - * @param scope - Which scope to check ('project' or 'global') - * @returns Filtered array of plugins with write permission - */ -export async function filterPluginsByWritePermission( - plugins: readonly OutputPlugin[], - ctx: OutputWriteContext, - scope: 'project' | 'global' = 'project' -): Promise { - const permissions = await checkCanWrite([...plugins], ctx) - return plugins.filter(p => permissions.get(p.name)?.[scope] ?? true) -} - /** * Result summary from aggregating plugin outputs */ diff --git a/cli/src/commands/DryRunCleanCommand.ts b/cli/src/commands/DryRunCleanCommand.ts index 4d3b105a..67e1e2f4 100644 --- a/cli/src/commands/DryRunCleanCommand.ts +++ b/cli/src/commands/DryRunCleanCommand.ts @@ -1,6 +1,7 @@ import type {Command, CommandContext, CommandResult} from './Command' import * as path from 'node:path' -import {checkCanClean, collectAllPluginOutputs, executeOnCleanComplete} from '../plugins/plugin-shared' +import {collectAllPluginOutputs} from '../plugins/plugin-core' +import {logProtectedDeletionGuardError} from '../ProtectedDeletionGuard' import {collectDeletionTargets} from './CleanupUtils' /** @@ -20,19 +21,34 @@ export class DryRunCleanCommand implements Command { dryRun: true, projectDirs: outputs.projectDirs.length, projectFiles: outputs.projectFiles.length, + workspaceDirs: outputs.workspaceDirs.length, + workspaceFiles: outputs.workspaceFiles.length, globalDirs: outputs.globalDirs.length, globalFiles: outputs.globalFiles.length }) - const permissions = await checkCanClean(outputPlugins, cleanCtx) - const {filesToDelete, dirsToDelete} = await collectDeletionTargets(outputPlugins, permissions, cleanCtx) + const {filesToDelete, dirsToDelete, violations, excludedScanGlobs} = await collectDeletionTargets(outputPlugins, cleanCtx) + + if (violations.length > 0) { + logProtectedDeletionGuardError(logger, 'dry-run-cleanup', violations) + 
return { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: `Protected deletion guard blocked cleanup for ${violations.length} path(s)` + } + } this.logDryRunFiles(filesToDelete, logger) this.logDryRunDirectories(dirsToDelete, logger) - await executeOnCleanComplete(outputPlugins, cleanCtx) - - logger.info('clean complete', {dryRun: true, filesAffected: filesToDelete.length, dirsAffected: dirsToDelete.length}) + logger.info('clean complete', { + dryRun: true, + filesAffected: filesToDelete.length, + dirsAffected: dirsToDelete.length, + violations: 0, + excludedScanGlobs + }) return { success: true, diff --git a/cli/src/commands/DryRunOutputCommand.ts b/cli/src/commands/DryRunOutputCommand.ts index f90da009..323daf6a 100644 --- a/cli/src/commands/DryRunOutputCommand.ts +++ b/cli/src/commands/DryRunOutputCommand.ts @@ -1,5 +1,7 @@ import type {Command, CommandContext, CommandResult} from './Command' -import {checkCanWrite, executeWriteOutputs} from '../plugins/plugin-shared' +import { + executeDeclarativeWriteOutputs +} from '../plugins/plugin-core' /** * Dry-run output command - simulates write operations without actual I/O @@ -12,12 +14,7 @@ export class DryRunOutputCommand implements Command { logger.info('started', {command: 'dry-run-output', dryRun: true}) const writeCtx = createWriteContext(true) - const permissions = await checkCanWrite(outputPlugins, writeCtx) - const allowedPlugins = outputPlugins.filter( - p => Boolean(permissions.get(p.name)?.project ?? 
true) - ) - - const results = await executeWriteOutputs(allowedPlugins, writeCtx) + const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx) let totalFiles = 0 let totalDirs = 0 diff --git a/cli/src/commands/ExecuteCommand.ts b/cli/src/commands/ExecuteCommand.ts index dcdad900..9a8c95d1 100644 --- a/cli/src/commands/ExecuteCommand.ts +++ b/cli/src/commands/ExecuteCommand.ts @@ -1,5 +1,7 @@ import type {Command, CommandContext, CommandResult} from './Command' -import {checkCanWrite, executeWriteOutputs} from '../plugins/plugin-shared' +import { + executeDeclarativeWriteOutputs +} from '../plugins/plugin-core' import {performCleanup} from './CleanupUtils' /** @@ -14,24 +16,40 @@ export class ExecuteCommand implements Command { logger.info('started', {command: 'execute'}) const cleanCtx = createCleanContext(false) // Step 1: Pre-cleanup (non-dry-run only) - const cleanupResult = await performCleanup(outputPlugins, cleanCtx, logger, { - executeHooks: false // They will be handled by the write phase // Skip onCleanComplete hooks during pre-cleanup - }) + const cleanupResult = await performCleanup(outputPlugins, cleanCtx, logger) + + if (cleanupResult.violations.length > 0 || cleanupResult.conflicts.length > 0) { + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + ...cleanupResult.message != null ? {message: cleanupResult.message} : {} + } + } + logger.info('cleanup complete', {deletedFiles: cleanupResult.deletedFiles, deletedDirs: cleanupResult.deletedDirs}) const writeCtx = createWriteContext(false) // Step 2: Write outputs - const permissions = await checkCanWrite(outputPlugins, writeCtx) - const allowedPlugins = outputPlugins.filter( - p => permissions.get(p.name)?.project ?? 
true - ) - - const results = await executeWriteOutputs(allowedPlugins, writeCtx) + const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx) let totalFiles = 0 let totalDirs = 0 + const writeErrors: string[] = [] for (const result of results.values()) { totalFiles += result.files.length totalDirs += result.dirs.length + for (const fileResult of result.files) { + if (!fileResult.success) writeErrors.push(fileResult.error?.message ?? `Failed to write ${fileResult.path}`) + } + } + + if (writeErrors.length > 0) { + return { + success: false, + filesAffected: totalFiles, + dirsAffected: totalDirs, + message: writeErrors.join('\n') + } } logger.info('complete', {command: 'execute', pluginCount: results.size}) diff --git a/cli/src/commands/HelpCommand.ts b/cli/src/commands/HelpCommand.ts index 2ceb187b..285b72b4 100644 --- a/cli/src/commands/HelpCommand.ts +++ b/cli/src/commands/HelpCommand.ts @@ -12,8 +12,7 @@ USAGE: ${CLI_NAME} Run the sync pipeline (default) ${CLI_NAME} help Show this help message ${CLI_NAME} version Show version information - ${CLI_NAME} outdated Check for version updates - ${CLI_NAME} init Initialize directories and files + ${CLI_NAME} init Deprecated; no longer initializes aindex ${CLI_NAME} dry-run Preview what would be written ${CLI_NAME} clean Remove all generated files ${CLI_NAME} clean --dry-run Preview what would be cleaned @@ -22,8 +21,7 @@ USAGE: SUBCOMMANDS: help Show this help message version Show version information - outdated Check if CLI version is outdated against npm registry - init Initialize directory structure based on configuration + init Deprecated; keep public target-relative definitions manually dry-run Preview changes without writing files clean Remove all generated output files and directories config Set configuration values in global config file (~/.aindex/.tnmsc.json) diff --git a/cli/src/commands/InitCommand.test.ts b/cli/src/commands/InitCommand.test.ts new file mode 100644 index 00000000..3224c8f6 
--- /dev/null +++ b/cli/src/commands/InitCommand.test.ts @@ -0,0 +1,78 @@ +import type {CommandContext} from './Command' +import * as fs from 'node:fs' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger, FilePathKind} from '../plugins/plugin-core' +import {InitCommand} from './InitCommand' + +function createCommandContext(): CommandContext { + const workspaceDir = path.resolve('tmp-init-command') + const userConfigOptions = mergeConfig({workspaceDir}) + + return { + logger: createLogger('InitCommandTest', 'error'), + outputPlugins: [], + userConfigOptions, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + }, + createCleanContext: dryRun => ({ + logger: createLogger('InitCommandTest', 'error'), + fs, + path, + glob, + dryRun, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + } + }) as CommandContext['createCleanContext'] extends (dryRun: boolean) => infer T ? T : never, + createWriteContext: dryRun => ({ + logger: createLogger('InitCommandTest', 'error'), + fs, + path, + glob, + dryRun, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [] + } + } + }) as CommandContext['createWriteContext'] extends (dryRun: boolean) => infer T ? 
T : never + } +} + +describe('init command', () => { + it('returns a deprecation failure without creating files', async () => { + const result = await new InitCommand().execute(createCommandContext()) + + expect(result.success).toBe(false) + expect(result.filesAffected).toBe(0) + expect(result.dirsAffected).toBe(0) + expect(result.message).toContain('deprecated') + expect(result.message).toContain('~/workspace/aindex/public/') + }) +}) diff --git a/cli/src/commands/InitCommand.ts b/cli/src/commands/InitCommand.ts new file mode 100644 index 00000000..eb3d9600 --- /dev/null +++ b/cli/src/commands/InitCommand.ts @@ -0,0 +1,20 @@ +import type {Command, CommandContext, CommandResult} from './Command' + +const INIT_DEPRECATION_MESSAGE = '`tnmsc init` is deprecated and no longer initializes aindex. Maintain the public target-relative definitions manually under `~/workspace/aindex/public/`.' + +export class InitCommand implements Command { + readonly name = 'init' + + async execute(ctx: CommandContext): Promise { + const {logger} = ctx + + logger.warn('deprecated init command invoked', {command: 'init'}) + + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: INIT_DEPRECATION_MESSAGE + } + } +} diff --git a/cli/src/commands/JsonOutputCommand.ts b/cli/src/commands/JsonOutputCommand.ts index 6e3b2a66..55f5bdba 100644 --- a/cli/src/commands/JsonOutputCommand.ts +++ b/cli/src/commands/JsonOutputCommand.ts @@ -42,7 +42,7 @@ export function toJsonCommandResult(result: CommandResult): JsonCommandResult { dirsAffected: result.dirsAffected, ...result.message != null && {message: result.message}, pluginResults: [], - errors: [] + errors: result.success || result.message == null ? 
[] : [result.message] } return json } diff --git a/cli/src/commands/OutdatedCommand.ts b/cli/src/commands/OutdatedCommand.ts deleted file mode 100644 index bc86bde5..00000000 --- a/cli/src/commands/OutdatedCommand.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {checkVersion, logVersionCheckResult} from '@/versionCheck' - -/** - * Outdated command - check if CLI version is outdated - */ -export class OutdatedCommand implements Command { - readonly name = 'outdated' - - async execute(ctx: CommandContext): Promise { - const result = await checkVersion() - logVersionCheckResult(result, ctx.logger) - - return { - success: true, - filesAffected: 0, - dirsAffected: 0, - message: `Version status: ${result.status}` - } - } -} diff --git a/cli/src/commands/PluginsCommand.ts b/cli/src/commands/PluginsCommand.ts index 8a040cb6..454a1e9b 100644 --- a/cli/src/commands/PluginsCommand.ts +++ b/cli/src/commands/PluginsCommand.ts @@ -1,6 +1,6 @@ import type {Command, CommandContext, CommandResult, JsonPluginInfo} from './Command' import process from 'node:process' -import {PluginKind} from '../plugins/plugin-shared' +import {PluginKind} from '../plugins/plugin-core' /** * Command that outputs all registered plugin information as JSON. 
diff --git a/cli/src/commands/ProtectedDeletionCommands.test.ts b/cli/src/commands/ProtectedDeletionCommands.test.ts new file mode 100644 index 00000000..82777d5f --- /dev/null +++ b/cli/src/commands/ProtectedDeletionCommands.test.ts @@ -0,0 +1,205 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin, OutputWriteContext} from '../plugins/plugin-core' +import type {CommandContext} from './Command' +import * as fs from 'node:fs' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it, vi} from 'vitest' +import {FilePathKind, PluginKind} from '../plugins/plugin-core' +import {CleanCommand} from './CleanCommand' +import {DryRunCleanCommand} from './DryRunCleanCommand' +import {ExecuteCommand} from './ExecuteCommand' +import {JsonOutputCommand} from './JsonOutputCommand' + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} + } as ILogger +} + +function createMockOutputPlugin( + cleanup?: OutputCleanupDeclarations, + convertContent?: OutputPlugin['convertContent'] +): OutputPlugin { + return { + type: PluginKind.Output, + name: 'MockOutputPlugin', + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return [{path: path.join(path.resolve('tmp-workspace-command'), 'project-a', 'AGENTS.md'), source: {}}] + }, + async declareCleanupPaths() { + return cleanup ?? 
{} + }, + async convertContent(declaration, ctx) { + if (convertContent != null) return convertContent(declaration, ctx) + return 'test' + } + } +} + +function createCommandContext(outputPlugins: readonly OutputPlugin[]): CommandContext { + const workspaceDir = path.resolve('tmp-workspace-command') + const aindexDir = path.join(workspaceDir, 'aindex') + const collectedOutputContext = { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [{ + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + } + }] + }, + aindexDir + } + + return { + logger: createMockLogger(), + outputPlugins, + collectedOutputContext, + userConfigOptions: { + version: '0.0.0', + workspaceDir, + logLevel: 'info', + aindex: { + dir: 'aindex', + skills: {src: 'skills', dist: 'dist/skills'}, + commands: {src: 'commands', dist: 'dist/commands'}, + subAgents: {src: 'subagents', dist: 'dist/subagents'}, + rules: {src: 'rules', dist: 'dist/rules'}, + globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, + workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, + app: {src: 'app', dist: 'dist/app'}, + ext: {src: 'ext', dist: 'dist/ext'}, + arch: {src: 'arch', dist: 'dist/arch'} + }, + commandSeriesOptions: {}, + outputScopes: {}, + cleanupProtection: {}, + plugins: [] + }, + createCleanContext: (dryRun: boolean): OutputCleanContext => ({ + logger: createMockLogger(), + fs, + path, + glob, + collectedOutputContext, + pluginOptions: { + version: '0.0.0', + workspaceDir, + logLevel: 'info', + aindex: { + dir: 'aindex', + skills: {src: 'skills', dist: 'dist/skills'}, + commands: {src: 'commands', dist: 'dist/commands'}, + subAgents: {src: 'subagents', dist: 'dist/subagents'}, + rules: 
{src: 'rules', dist: 'dist/rules'}, + globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, + workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, + app: {src: 'app', dist: 'dist/app'}, + ext: {src: 'ext', dist: 'dist/ext'}, + arch: {src: 'arch', dist: 'dist/arch'} + }, + commandSeriesOptions: {}, + outputScopes: {}, + cleanupProtection: {}, + plugins: [] + }, + dryRun + }), + createWriteContext: (dryRun: boolean): OutputWriteContext => ({ + logger: createMockLogger(), + fs, + path, + glob, + collectedOutputContext, + dryRun, + registeredPluginNames: outputPlugins.map(plugin => plugin.name) + }) + } +} + +describe('protected deletion commands', () => { + it('returns failure for clean and dry-run-clean when cleanup hits a protected path', async () => { + const workspaceDir = path.resolve('tmp-workspace-command') + const plugin = createMockOutputPlugin({ + delete: [{kind: 'directory', path: workspaceDir}] + }) + const ctx = createCommandContext([plugin]) + + await expect(new CleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ + success: false, + message: expect.stringContaining('Protected deletion guard blocked cleanup') + })) + await expect(new DryRunCleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ + success: false, + message: expect.stringContaining('Protected deletion guard blocked cleanup') + })) + }) + + it('returns failure before writes run when execute pre-cleanup hits a protected path', async () => { + const workspaceDir = path.resolve('tmp-workspace-command') + const convertContent = vi.fn(async () => 'should-not-write') + const plugin = createMockOutputPlugin({ + delete: [{kind: 'directory', path: workspaceDir}] + }, convertContent) + const ctx = createCommandContext([plugin]) + + await expect(new ExecuteCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ + success: false, + message: expect.stringContaining('Protected deletion guard blocked cleanup') + })) + 
expect(convertContent).not.toHaveBeenCalled() + }) + + it('returns failure when an output path conflicts with a cleanup protect declaration', async () => { + const outputPath = path.join(path.resolve('tmp-workspace-command'), 'project-a', 'AGENTS.md') + const plugin = createMockOutputPlugin({ + protect: [{kind: 'file', path: outputPath}] + }) + const ctx = createCommandContext([plugin]) + + await expect(new CleanCommand().execute(ctx)).resolves.toEqual(expect.objectContaining({ + success: false, + message: expect.stringContaining('Cleanup protection conflict') + })) + }) + + it('includes the failure message in JSON output errors', async () => { + const writeSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + const command = new JsonOutputCommand({ + name: 'mock', + async execute() { + return { + success: false, + filesAffected: 0, + dirsAffected: 0, + message: 'blocked' + } + } + }) + + try { + await command.execute(createCommandContext([])) + expect(writeSpy).toHaveBeenCalledOnce() + expect(String(writeSpy.mock.calls[0]?.[0])).toContain('"errors":["blocked"]') + } + finally { + writeSpy.mockRestore() + } + }) +}) diff --git a/cli/src/commands/factories/CleanCommandFactory.ts b/cli/src/commands/factories/CleanCommandFactory.ts index 8232337d..017d1025 100644 --- a/cli/src/commands/factories/CleanCommandFactory.ts +++ b/cli/src/commands/factories/CleanCommandFactory.ts @@ -1,6 +1,6 @@ import type {Command} from '../Command' import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {CleanCommand} from '../CleanCommand' import {DryRunCleanCommand} from '../DryRunCleanCommand' diff --git a/cli/src/commands/factories/ConfigCommandFactory.ts b/cli/src/commands/factories/ConfigCommandFactory.ts index 95edea98..bc7b6fe0 100644 --- a/cli/src/commands/factories/ConfigCommandFactory.ts +++ b/cli/src/commands/factories/ConfigCommandFactory.ts 
@@ -1,6 +1,6 @@ import type {Command} from '../Command' import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {ConfigCommand} from '../ConfigCommand' import {ConfigShowCommand} from '../ConfigShowCommand' diff --git a/cli/src/commands/factories/DryRunCommandFactory.ts b/cli/src/commands/factories/DryRunCommandFactory.ts index b827e043..eaa4ba7d 100644 --- a/cli/src/commands/factories/DryRunCommandFactory.ts +++ b/cli/src/commands/factories/DryRunCommandFactory.ts @@ -1,6 +1,6 @@ import type {Command} from '../Command' import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {DryRunOutputCommand} from '../DryRunOutputCommand' /** diff --git a/cli/src/commands/factories/ExecuteCommandFactory.ts b/cli/src/commands/factories/ExecuteCommandFactory.ts index 248688bf..3a47667c 100644 --- a/cli/src/commands/factories/ExecuteCommandFactory.ts +++ b/cli/src/commands/factories/ExecuteCommandFactory.ts @@ -1,6 +1,6 @@ import type {Command} from '../Command' import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {ExecuteCommand} from '../ExecuteCommand' /** diff --git a/cli/src/commands/factories/HelpCommandFactory.ts b/cli/src/commands/factories/HelpCommandFactory.ts index ab8599e0..fa11beb3 100644 --- a/cli/src/commands/factories/HelpCommandFactory.ts +++ b/cli/src/commands/factories/HelpCommandFactory.ts @@ -1,6 +1,6 @@ import type {Command} from '../Command' import type {PrioritizedCommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {FactoryPriority} from '../CommandFactory' import {HelpCommand} from '../HelpCommand' diff 
--git a/cli/src/commands/factories/InitCommandFactory.ts b/cli/src/commands/factories/InitCommandFactory.ts new file mode 100644 index 00000000..d9622527 --- /dev/null +++ b/cli/src/commands/factories/InitCommandFactory.ts @@ -0,0 +1,14 @@ +import type {Command} from '../Command' +import type {CommandFactory} from '../CommandFactory' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' +import {InitCommand} from '../InitCommand' + +export class InitCommandFactory implements CommandFactory { + canHandle(args: ParsedCliArgs): boolean { + return args.subcommand === 'init' + } + + createCommand(_args: ParsedCliArgs): Command { + return new InitCommand() + } +} diff --git a/cli/src/commands/factories/OutdatedCommandFactory.ts b/cli/src/commands/factories/OutdatedCommandFactory.ts deleted file mode 100644 index ed709e71..00000000 --- a/cli/src/commands/factories/OutdatedCommandFactory.ts +++ /dev/null @@ -1,18 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline' -import {OutdatedCommand} from '../OutdatedCommand' - -/** - * Factory for creating OutdatedCommand - * Handles 'outdated' subcommand - */ -export class OutdatedCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { - return args.subcommand === 'outdated' - } - - createCommand(_args: ParsedCliArgs): Command { - return new OutdatedCommand() - } -} diff --git a/cli/src/commands/factories/PluginsCommandFactory.ts b/cli/src/commands/factories/PluginsCommandFactory.ts index e92bab56..2992beca 100644 --- a/cli/src/commands/factories/PluginsCommandFactory.ts +++ b/cli/src/commands/factories/PluginsCommandFactory.ts @@ -1,6 +1,6 @@ import type {Command} from '../Command' import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {PluginsCommand} from '../PluginsCommand' 
/** diff --git a/cli/src/commands/factories/UnknownCommandFactory.ts b/cli/src/commands/factories/UnknownCommandFactory.ts index c57c8481..a652ffe5 100644 --- a/cli/src/commands/factories/UnknownCommandFactory.ts +++ b/cli/src/commands/factories/UnknownCommandFactory.ts @@ -1,6 +1,6 @@ import type {Command} from '../Command' import type {PrioritizedCommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {FactoryPriority} from '../CommandFactory' import {UnknownCommand} from '../UnknownCommand' diff --git a/cli/src/commands/factories/VersionCommandFactory.ts b/cli/src/commands/factories/VersionCommandFactory.ts index dcc38974..624a3945 100644 --- a/cli/src/commands/factories/VersionCommandFactory.ts +++ b/cli/src/commands/factories/VersionCommandFactory.ts @@ -1,6 +1,6 @@ import type {Command} from '../Command' import type {PrioritizedCommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline' +import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' import {FactoryPriority} from '../CommandFactory' import {VersionCommand} from '../VersionCommand' diff --git a/cli/src/commands/factories/index.ts b/cli/src/commands/factories/index.ts deleted file mode 100644 index 4c1e174e..00000000 --- a/cli/src/commands/factories/index.ts +++ /dev/null @@ -1,38 +0,0 @@ -export type { - CommandFactory, - PrioritizedCommandFactory -} from '../CommandFactory' // Command Factory exports -export { - FactoryPriority -} from '../CommandFactory' -export { - CommandRegistry -} from '../CommandRegistry' - -export { - CleanCommandFactory -} from './CleanCommandFactory' -export { - ConfigCommandFactory -} from './ConfigCommandFactory' -export { - DryRunCommandFactory -} from './DryRunCommandFactory' -export { - ExecuteCommandFactory -} from './ExecuteCommandFactory' -export { - HelpCommandFactory -} from './HelpCommandFactory' -export { - 
OutdatedCommandFactory -} from './OutdatedCommandFactory' -export { - PluginsCommandFactory -} from './PluginsCommandFactory' -export { - UnknownCommandFactory -} from './UnknownCommandFactory' -export { - VersionCommandFactory -} from './VersionCommandFactory' // Factory implementations diff --git a/cli/src/commands/help.rs b/cli/src/commands/help.rs index 1187915a..94b02bd9 100644 --- a/cli/src/commands/help.rs +++ b/cli/src/commands/help.rs @@ -13,7 +13,6 @@ pub fn execute() -> ExitCode { println!(" config Set or show configuration values"); println!(" plugins List all registered plugins"); println!(" version Show version information"); - println!(" outdated Check if CLI version is outdated"); println!(" help Show this help message"); println!(); println!("OPTIONS:"); diff --git a/cli/src/commands/index.ts b/cli/src/commands/index.ts deleted file mode 100644 index df10df91..00000000 --- a/cli/src/commands/index.ts +++ /dev/null @@ -1,30 +0,0 @@ -export * from './CleanCommand' -export * from './CleanupUtils' -export * from './Command' -export type { - CommandFactory, - PrioritizedCommandFactory -} from './CommandFactory' // Command Factory exports -export { - FactoryPriority -} from './CommandFactory' -export { - CommandRegistry -} from './CommandRegistry' -export { - createDefaultCommandRegistry -} from './CommandRegistryFactory' -export * from './CommandUtils' -export * from './ConfigCommand' -export * from './ConfigShowCommand' -export * from './DryRunCleanCommand' -export * from './DryRunOutputCommand' -export * from './ExecuteCommand' -export * from './factories' // Factory implementations -export * from './HelpCommand' -export * from './JsonOutputCommand' -export * from './OutdatedCommand' -export * from './PluginsCommand' -export * from './UnknownCommand' - -export * from './VersionCommand' diff --git a/cli/src/commands/mod.rs b/cli/src/commands/mod.rs index f2c252ff..71cc954f 100644 --- a/cli/src/commands/mod.rs +++ b/cli/src/commands/mod.rs @@ -1,6 +1,5 
@@ pub mod help; pub mod version; -pub mod outdated; pub mod config_cmd; pub mod config_show; pub mod bridge; diff --git a/cli/src/commands/outdated.rs b/cli/src/commands/outdated.rs deleted file mode 100644 index a8bc0ebe..00000000 --- a/cli/src/commands/outdated.rs +++ /dev/null @@ -1,34 +0,0 @@ -use std::process::ExitCode; - -use tnmsc_logger::create_logger; - -pub fn execute() -> ExitCode { - let logger = create_logger("outdated", None); - let current = env!("CARGO_PKG_VERSION"); - - let output = std::process::Command::new("npm") - .args(["view", "@truenine/memory-sync-cli", "version", "--json"]) - .output(); - - match output { - Ok(out) if out.status.success() => { - let raw = String::from_utf8_lossy(&out.stdout); - let latest = raw.trim().trim_matches('"'); - if latest == current { - println!("tnmsc is up to date: {current}"); - } else { - println!("tnmsc is outdated: {current} → {latest}"); - println!("Run: npm install -g @truenine/memory-sync-cli"); - return ExitCode::from(1); - } - ExitCode::SUCCESS - } - _ => { - logger.warn( - serde_json::Value::String("Could not check npm registry for latest version".into()), - None, - ); - ExitCode::SUCCESS - } - } -} diff --git a/cli/src/config.outputScopes.test.ts b/cli/src/config.outputScopes.test.ts new file mode 100644 index 00000000..a5b9e7ae --- /dev/null +++ b/cli/src/config.outputScopes.test.ts @@ -0,0 +1,45 @@ +import {describe, expect, it} from 'vitest' +import {mergeConfig} from './config' + +describe('mergeConfig outputScopes', () => { + it('merges plugin topic overrides deeply', () => { + const merged = mergeConfig( + { + outputScopes: { + plugins: { + CursorOutputPlugin: { + commands: 'global', + skills: ['workspace', 'global'] + } + } + } + }, + { + outputScopes: { + plugins: { + CursorOutputPlugin: { + rules: 'project', + skills: 'project' + }, + OpencodeCLIOutputPlugin: { + mcp: 'global' + } + } + } + } + ) + + expect(merged.outputScopes).toEqual({ + plugins: { + CursorOutputPlugin: { + commands: 
'global', + skills: 'project', + rules: 'project' + }, + OpencodeCLIOutputPlugin: { + mcp: 'global' + } + } + }) + }) +}) diff --git a/cli/src/config.test.ts b/cli/src/config.test.ts new file mode 100644 index 00000000..b4dbd27e --- /dev/null +++ b/cli/src/config.test.ts @@ -0,0 +1,83 @@ +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {afterEach, describe, expect, it, vi} from 'vitest' +import {defineConfig} from './config' +import {GitIgnoreInputPlugin} from './inputs/input-gitignore' +import {WorkspaceInputPlugin} from './inputs/input-workspace' + +describe('defineConfig', () => { + afterEach(() => vi.restoreAllMocks()) + + it('loads a project-local config when no global config is available', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-workspace-')) + + const localConfigPath = path.join(tempWorkspace, '.tnmsc.json') + fs.writeFileSync(localConfigPath, JSON.stringify({ + workspaceDir: tempWorkspace, + aindex: { + dir: 'aindex', + skills: {src: 'skills', dist: 'dist/skills'}, + commands: {src: 'commands', dist: 'dist/commands'}, + subAgents: {src: 'subagents', dist: 'dist/subagents'}, + rules: {src: 'rules', dist: 'dist/rules'}, + globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, + workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, + app: {src: 'app', dist: 'dist/app'}, + ext: {src: 'ext', dist: 'dist/ext'}, + arch: {src: 'arch', dist: 'dist/arch'} + }, + logLevel: 'info' + }), 'utf8') + + try { + const result = await defineConfig({ + cwd: tempWorkspace, + configLoaderOptions: {searchGlobal: false}, + pluginOptions: { + plugins: [new WorkspaceInputPlugin()] + } + }) + + expect(result.userConfigOptions.workspaceDir).toBe(tempWorkspace) + expect(result.context.workspace.directory.path).toBe(tempWorkspace) + expect(result.context.aindexDir).toBe(path.join(tempWorkspace, 'aindex')) + } + finally { + fs.rmSync(tempWorkspace, {recursive: 
true, force: true}) + } + }) + + it('passes pipeline args into public proxy resolution', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-public-proxy-command-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const publicDir = path.join(aindexDir, 'public') + + fs.mkdirSync(path.join(publicDir, 'execute'), {recursive: true}) + fs.mkdirSync(path.join(publicDir, 'dry-run'), {recursive: true}) + fs.writeFileSync(path.join(publicDir, 'proxy.ts'), [ + 'export default (_logicalPath, ctx) => ctx.command === "dry-run"', + ' ? "dry-run/gitignore"', + ' : "execute/gitignore"', + '' + ].join('\n'), 'utf8') + fs.writeFileSync(path.join(publicDir, 'execute', 'gitignore'), 'execute\n', 'utf8') + fs.writeFileSync(path.join(publicDir, 'dry-run', 'gitignore'), 'dry-run\n', 'utf8') + + try { + const result = await defineConfig({ + loadUserConfig: false, + pipelineArgs: ['node', 'tnmsc', 'dry-run'], + pluginOptions: { + workspaceDir: tempWorkspace, + plugins: [new WorkspaceInputPlugin(), new GitIgnoreInputPlugin()] + } + }) + + expect(result.context.globalGitIgnore).toBe('dry-run\n') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/config.ts b/cli/src/config.ts index 625098c5..b80abdd5 100644 --- a/cli/src/config.ts +++ b/cli/src/config.ts @@ -1,18 +1,37 @@ -import type {AindexConfig, CollectedInputContext, CommandSeriesOptions, CommandSeriesPluginOverride, ConfigLoaderOptions, InputPlugin, InputPluginContext, OutputPlugin, PluginOptions, UserConfigFile} from './plugins/plugin-shared' +import type { + AindexConfig, + CleanupProtectionOptions, + CommandSeriesOptions, + CommandSeriesPluginOverride, + ConfigLoaderOptions, + InputCollectedContext, + InputPlugin, + InputPluginContext, + OutputCollectedContext, + OutputPlugin, + OutputScopeOptions, + PluginOptions, + PluginOutputScopeTopics, + UserConfigFile +} from './plugins/plugin-core' import * as fs from 'node:fs' import * as path 
from 'node:path' -import process from 'node:process' import glob from 'fast-glob' import {checkVersionControl} from './Aindex' -import {loadUserConfig, validateGlobalConfig} from './ConfigLoader' +import {getConfigLoader} from './ConfigLoader' import {PluginPipeline} from './PluginPipeline' -import {createLogger, PluginKind} from './plugins/plugin-shared' +import { + createLogger, + PluginKind, + toOutputCollectedContext, + validateOutputScopeOverridesForPlugins +} from './plugins/plugin-core' /** * Pipeline configuration containing collected context and output plugins */ export interface PipelineConfig { - readonly context: CollectedInputContext + readonly context: OutputCollectedContext readonly outputPlugins: readonly OutputPlugin[] readonly userConfigOptions: Required<PluginOptions> } @@ -23,8 +42,8 @@ const DEFAULT_AINDEX: Required<AindexConfig> = { commands: {src: 'commands', dist: 'dist/commands'}, subAgents: {src: 'subagents', dist: 'dist/subagents'}, rules: {src: 'rules', dist: 'dist/rules'}, - globalPrompt: {src: 'global.cn.mdx', dist: 'dist/global.mdx'}, - workspacePrompt: {src: 'workspace.cn.mdx', dist: 'dist/workspace.mdx'}, + globalPrompt: {src: 'global.src.mdx', dist: 'dist/global.mdx'}, + workspacePrompt: {src: 'workspace.src.mdx', dist: 'dist/workspace.mdx'}, app: {src: 'app', dist: 'dist/app'}, ext: {src: 'ext', dist: 'dist/ext'}, arch: {src: 'arch', dist: 'dist/arch'} @@ -36,6 +55,8 @@ const DEFAULT_OPTIONS: Required<PluginOptions> = { logLevel: 'info', aindex: DEFAULT_AINDEX, commandSeriesOptions: {}, + outputScopes: {}, + cleanupProtection: {}, plugins: [] } @@ -49,6 +70,8 @@ function userConfigToPluginOptions(userConfig: UserConfigFile): Partial<PluginOptions> { const overridePlugins = override.plugins const overrideCommandSeries = override.commandSeriesOptions + const overrideOutputScopes = override.outputScopes + const overrideCleanupProtection = override.cleanupProtection return { ...base, @@ -95,7 +122,9 @@ function mergeTwoConfigs( ...base.plugins, ...overridePlugins ?? 
[] + ], - commandSeriesOptions: mergeCommandSeriesOptions(base.commandSeriesOptions, overrideCommandSeries) // Deep merge for commandSeriesOptions + commandSeriesOptions: mergeCommandSeriesOptions(base.commandSeriesOptions, overrideCommandSeries), // Deep merge for commandSeriesOptions + outputScopes: mergeOutputScopeOptions(base.outputScopes, overrideOutputScopes), + cleanupProtection: mergeCleanupProtectionOptions(base.cleanupProtection, overrideCleanupProtection) } } @@ -149,11 +178,64 @@ function mergeCommandSeriesOptions( return {} } +function mergeOutputScopeTopics( + base?: PluginOutputScopeTopics, + override?: PluginOutputScopeTopics +): PluginOutputScopeTopics | undefined { + if (base == null && override == null) return void 0 + if (base == null) return override + if (override == null) return base + return {...base, ...override} +} + +function mergeOutputScopeOptions( + base?: OutputScopeOptions, + override?: OutputScopeOptions +): OutputScopeOptions { + if (override == null) return base ?? {} + if (base == null) return override + + const mergedPlugins: Record<string, PluginOutputScopeTopics> = {} + if (base.plugins != null) { + for (const [pluginName, topics] of Object.entries(base.plugins)) { + if (topics != null) mergedPlugins[pluginName] = {...topics} + } + } + if (override.plugins != null) { + for (const [pluginName, topics] of Object.entries(override.plugins)) { + const mergedTopics = mergeOutputScopeTopics(mergedPlugins[pluginName], topics) + if (mergedTopics != null) mergedPlugins[pluginName] = mergedTopics + } + } + + if (Object.keys(mergedPlugins).length === 0) return {} + return {plugins: mergedPlugins} +} + +function mergeCleanupProtectionOptions( + base?: CleanupProtectionOptions, + override?: CleanupProtectionOptions +): CleanupProtectionOptions { + if (override == null) return base ?? {} + if (base == null) return override + + return { + rules: [ + ...base.rules ?? [], + ...override.rules ?? 
[] + ] + } +} + /** * Check if options is DefineConfigOptions */ function isDefineConfigOptions(options: PluginOptions | DefineConfigOptions): options is DefineConfigOptions { - return 'pluginOptions' in options || 'configLoaderOptions' in options || 'loadUserConfig' in options + return 'pluginOptions' in options + || 'configLoaderOptions' in options + || 'loadUserConfig' in options + || 'cwd' in options + || 'pipelineArgs' in options } /** @@ -168,23 +250,30 @@ function isDefineConfigOptions(options: PluginOptions | DefineConfigOptions): op * @param options - Plugin options or DefineConfigOptions */ export async function defineConfig(options: PluginOptions | DefineConfigOptions = {}): Promise { - const validationResult = validateGlobalConfig() // Validate global config exists and is valid - do not auto-create - if (!validationResult.valid) { - const logger = createLogger('defineConfig') // Log all errors before exiting - for (const error of validationResult.errors) logger.error(error) - process.exit(1) - } - let shouldLoadUserConfig: boolean, // Normalize options cwd: string | undefined, - pluginOptions: PluginOptions + pluginOptions: PluginOptions, + configLoaderOptions: ConfigLoaderOptions | undefined, + pipelineArgs: readonly string[] | undefined if (isDefineConfigOptions(options)) { - ({pluginOptions = {}, cwd} = {pluginOptions: options.pluginOptions, cwd: options.cwd}) + ({ + pluginOptions = {}, + cwd, + configLoaderOptions, + pipelineArgs + } = { + pluginOptions: options.pluginOptions, + cwd: options.cwd, + configLoaderOptions: options.configLoaderOptions, + pipelineArgs: options.pipelineArgs + }) shouldLoadUserConfig = options.loadUserConfig ?? 
true } else { pluginOptions = options shouldLoadUserConfig = true + configLoaderOptions = void 0 + pipelineArgs = void 0 } let userConfigOptions: Partial = {} // Load user config if enabled @@ -194,7 +283,7 @@ export async function defineConfig(options: PluginOptions | DefineConfigOptions if (shouldLoadUserConfig) { try { - const userConfigResult = loadUserConfig(cwd) + const userConfigResult = getConfigLoader(configLoaderOptions).load(cwd) userConfigFound = userConfigResult.found userConfigSources = userConfigResult.sources if (userConfigResult.found) { @@ -203,22 +292,23 @@ export async function defineConfig(options: PluginOptions | DefineConfigOptions } } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error) // Config loading failed - throw error instead of using defaults + const errorMessage = error instanceof Error ? error.message : String(error) throw new Error(`Failed to load user config: ${errorMessage}`) } } - if (!userConfigFound) { // Require user config to be found - no fallback to defaults - throw new Error( - 'No user config found. Please create ~/.aindex/.tnmsc.json or a .tnmsc.json in your working directory.' 
- ) - } - const mergedOptions = mergeConfig(userConfigOptions, pluginOptions) // Merge: defaults <- user config <- programmatic options const {plugins = [], logLevel} = mergedOptions const logger = createLogger('defineConfig', logLevel) - logger.info('user config loaded', {sources: userConfigSources}) + if (userConfigFound) logger.info('user config loaded', {sources: userConfigSources}) + else { + logger.info('no user config found, using defaults/programmatic options', { + workspaceDir: mergedOptions.workspaceDir, + aindexDir: mergedOptions.aindex.dir, + logLevel: mergedOptions.logLevel + }) + } const baseCtx: Omit = { // Base context without dependencyContext, globalScope, scopeRegistry (will be provided by pipeline) logger, @@ -230,13 +320,16 @@ export async function defineConfig(options: PluginOptions | DefineConfigOptions const inputPlugins = plugins.filter((p): p is InputPlugin => p.type === PluginKind.Input) // Filter plugins by type const outputPlugins = plugins.filter((p): p is OutputPlugin => p.type === PluginKind.Output) + validateOutputScopeOverridesForPlugins(outputPlugins, mergedOptions) - const pipeline = new PluginPipeline() // Pass userConfigFile for GlobalScopeCollector to access profile and tool // Use PluginPipeline to execute plugins in dependency order + const pipeline = pipelineArgs != null + ? 
new PluginPipeline(...pipelineArgs) + : new PluginPipeline() // Pass userConfigFile for GlobalScopeCollector to access profile and tool // Use PluginPipeline to execute plugins in dependency order const merged = await pipeline.executePluginsInOrder(inputPlugins, baseCtx, false, userConfigFile) if (merged.workspace == null) throw new Error('Workspace not initialized by any plugin') // Validate workspace exists - const context: CollectedInputContext = { + const inputContext: InputCollectedContext = { workspace: merged.workspace, ...merged.vscodeConfigFiles != null && {vscodeConfigFiles: merged.vscodeConfigFiles}, ...merged.jetbrainsConfigFiles != null && {jetbrainsConfigFiles: merged.jetbrainsConfigFiles}, @@ -253,6 +346,8 @@ export async function defineConfig(options: PluginOptions | DefineConfigOptions ...merged.shadowGitExclude != null && {shadowGitExclude: merged.shadowGitExclude} } + const context = toOutputCollectedContext(inputContext) + if (merged.aindexDir != null) checkVersionControl(merged.aindexDir, logger) // Check version control status for aindex return {context, outputPlugins, userConfigOptions: mergedOptions} diff --git a/cli/src/config/ConfigService.ts b/cli/src/config/ConfigService.ts deleted file mode 100644 index a9ff3269..00000000 --- a/cli/src/config/ConfigService.ts +++ /dev/null @@ -1,193 +0,0 @@ -/** - * Configuration service for the TNMSC configuration system. - * - * This module provides a singleton service for loading, validating, - * and accessing configuration from ~/.aindex/.tnmsc.json - */ - -import type {ConfigLoadResult, ConfigServiceOptions, TnmscConfig} from './types' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import { - ConfigError, - ConfigFileNotFoundError, - ConfigParseError, - ConfigPermissionError, - ConfigValidationError -} from './errors' -import {clearPathCache} from './pathResolver' -import {validateConfig} from './schema' - -/** - * Default configuration file name. 
- */ -export const DEFAULT_CONFIG_FILE_NAME = '.tnmsc.json' - -/** - * Default global configuration directory (relative to home). - */ -export const DEFAULT_GLOBAL_CONFIG_DIR = '.aindex' - -/** - * Get the default global configuration file path. - * - * @returns The absolute path to ~/.aindex/.tnmsc.json - */ -export function getDefaultConfigPath(): string { - return path.join(os.homedir(), DEFAULT_GLOBAL_CONFIG_DIR, DEFAULT_CONFIG_FILE_NAME) -} - -/** - * Configuration service singleton for managing TNMSC configuration. - * - * This service provides: - * - Singleton access to configuration across the application - * - Automatic validation of configuration files - * - Runtime configuration reloading - * - Comprehensive error handling - */ -export class ConfigService { - private static instance: ConfigService | null = null - - private config: TnmscConfig | null = null - private configPath: string - private loadError: ConfigError | null = null - - private constructor(options: ConfigServiceOptions = {}) { - this.configPath = options.configPath ?? getDefaultConfigPath() - } - - static getInstance(options?: ConfigServiceOptions): ConfigService { - ConfigService.instance ??= new ConfigService(options) - return ConfigService.instance - } - - static resetInstance(): void { - ConfigService.instance = null - } - - load(): TnmscConfig { - this.loadError = null - - if (!fs.existsSync(this.configPath)) { // Check if file exists - this.loadError = new ConfigFileNotFoundError(this.configPath) - throw this.loadError - } - - let content: string // Read file content - try { - content = fs.readFileSync(this.configPath, 'utf8') - } - catch (error) { - const configError = new ConfigPermissionError( - this.configPath, - error instanceof Error ? 
error : new Error(String(error)) - ) - this.loadError = configError - throw configError - } - - let parsed: unknown // Parse JSON - try { - parsed = JSON.parse(content) - } - catch (error) { - if (error instanceof SyntaxError) { - const configError = new ConfigParseError(this.configPath, error) - this.loadError = configError - throw configError - } - throw error - } - - try { // Validate configuration - this.config = validateConfig(parsed) - clearPathCache() // Clear path cache when config is reloaded - return this.config - } - catch (error) { - if (error instanceof Error && error.name === 'ZodError') { - const zodError = error as unknown as {issues: {path: (string | number)[], message: string}[]} - const validationErrors = zodError.issues.map( - issue => `${issue.path.join('.')}: ${issue.message}` - ) - const configError = new ConfigValidationError(this.configPath, validationErrors) - this.loadError = configError - throw configError - } - throw error - } - } - - safeLoad(): ConfigLoadResult { - const config = this.load() - return { - config, - source: this.configPath, - found: true - } - } - - reload(): TnmscConfig { - this.config = null - return this.load() - } - - getConfig(): TnmscConfig { - if (this.config === null) { - throw new ConfigError( - 'Configuration has not been loaded. Call load() first.', - this.configPath - ) - } - return this.config - } - - isLoaded(): boolean { - return this.config !== null - } - - getLastError(): ConfigError | null { - return this.loadError - } - - getConfigPath(): string { - return this.configPath - } - - setConfigPath(configPath: string): void { - this.configPath = configPath - this.config = null // Reset loaded config - this.loadError = null - } -} - -/** - * Convenience function to get the ConfigService singleton instance. 
- * - * @param options - Optional configuration options - * @returns The ConfigService instance - */ -export function getConfigService(options?: ConfigServiceOptions): ConfigService { - return ConfigService.getInstance(options) -} - -/** - * Load configuration using the default ConfigService instance. - * - * @returns The loaded configuration - * @throws {ConfigError} If loading or validation fails - */ -export function loadConfig(): TnmscConfig { - return getConfigService().load() -} - -/** - * Safely load configuration using the default ConfigService instance. - * - * @returns The load result with success flag - */ -export function safeLoadConfig(): ConfigLoadResult { - return getConfigService().safeLoad() -} diff --git a/cli/src/config/accessors.ts b/cli/src/config/accessors.ts deleted file mode 100644 index 5fae229b..00000000 --- a/cli/src/config/accessors.ts +++ /dev/null @@ -1,245 +0,0 @@ -/** - * Configuration accessor functions for the TNMSC configuration system. - * - * This module provides convenient accessor functions for retrieving - * specific configuration values and resolved paths. - */ - -import type { - LogLevel, - ModulePaths, - Profile, - ResolvedModulePaths, - TnmscConfig -} from './types' -import {ConfigService} from './ConfigService' -import { - getAbsoluteWorkspaceDir, - getAindexModulePaths as resolveAindexModulePaths, - resolveAllAindexPaths -} from './pathResolver' - -/** - * Get the configuration from the default ConfigService instance. - * - * @returns The current configuration - * @throws {ConfigError} If configuration hasn't been loaded - */ -export function getConfig(): TnmscConfig { - return ConfigService.getInstance().getConfig() -} - -export function getVersion(config?: TnmscConfig): string { - const cfg = config ?? getConfig() - return cfg.version -} - -/** - * Get the workspace directory from the configuration. 
- * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The workspace directory path (with ~ expanded) - */ -export function getWorkspaceDir(config?: TnmscConfig): string { - const cfg = config ?? getConfig() - return cfg.workspaceDir -} - -/** - * Get the absolute workspace directory path. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The absolute workspace directory path - */ -export function getAbsoluteWorkspaceDirPath(config?: TnmscConfig): string { - const cfg = config ?? getConfig() - return getAbsoluteWorkspaceDir(cfg.workspaceDir) -} - -/** - * Get the log level from the configuration. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The log level setting - */ -export function getLogLevel(config?: TnmscConfig): LogLevel { - const cfg = config ?? getConfig() - return cfg.logLevel -} - -/** - * Get the profile information from the configuration. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The user profile - */ -export function getProfile(config?: TnmscConfig): Profile { - const cfg = config ?? getConfig() - return cfg.profile -} - -/** - * Get the aindex configuration. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The aindex configuration - */ -export function getAindexConfig(config?: TnmscConfig): TnmscConfig['aindex'] { - const cfg = config ?? getConfig() - return cfg.aindex -} - -/** - * Get a specific aindex module's paths. 
- * - * @param moduleName - The name of the module (e.g., 'skills', 'commands') - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The module's src/dist paths - */ -export function getAindexModulePaths( - moduleName: keyof TnmscConfig['aindex'] & string, - config?: TnmscConfig -): ModulePaths { - const cfg = config ?? getConfig() - const modulePaths = cfg.aindex[moduleName] - - if (modulePaths === void 0 || modulePaths === null || typeof modulePaths !== 'object' || !('src' in modulePaths)) { - throw new Error(`Invalid aindex module: ${moduleName}`) - } - - return modulePaths -} - -/** - * Get a specific aindex module's resolved paths (absolute and relative). - * - * @param moduleName - The name of the module (e.g., 'skills', 'commands') - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The resolved module paths - */ -export function getResolvedAindexModulePaths( - moduleName: keyof TnmscConfig['aindex'] & string, - config?: TnmscConfig -): ResolvedModulePaths { - const cfg = config ?? getConfig() - return resolveAindexModulePaths(cfg, moduleName) -} - -/** - * Get all resolved aindex module paths. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns Object with all module paths resolved - */ -export function getAllResolvedAindexPaths(config?: TnmscConfig): ReturnType { - const cfg = config ?? getConfig() - return resolveAllAindexPaths(cfg) -} - -/** - * Get the skills module paths. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The skills module paths - */ -export function getSkillsPaths(config?: TnmscConfig): ModulePaths { - return getAindexModulePaths('skills', config) -} - -/** - * Get the commands module paths. 
- * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The commands module paths - */ -export function getCommandsPaths(config?: TnmscConfig): ModulePaths { - return getAindexModulePaths('commands', config) -} - -/** - * Get the sub-agents module paths. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The sub-agents module paths - */ -export function getSubAgentsPaths(config?: TnmscConfig): ModulePaths { - return getAindexModulePaths('subAgents', config) -} - -/** - * Get the rules module paths. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The rules module paths - */ -export function getRulesPaths(config?: TnmscConfig): ModulePaths { - return getAindexModulePaths('rules', config) -} - -/** - * Get the global prompt file paths. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The global prompt file paths - */ -export function getGlobalPromptPaths(config?: TnmscConfig): ModulePaths { - return getAindexModulePaths('globalPrompt', config) -} - -/** - * Get the workspace prompt file paths. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The workspace prompt file paths - */ -export function getWorkspacePromptPaths(config?: TnmscConfig): ModulePaths { - return getAindexModulePaths('workspacePrompt', config) -} - -/** - * Get the app module paths. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The app module paths - */ -export function getAppPaths(config?: TnmscConfig): ModulePaths { - return getAindexModulePaths('app', config) -} - -/** - * Get the ext module paths. 
- * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The ext module paths - */ -export function getExtPaths(config?: TnmscConfig): ModulePaths { - return getAindexModulePaths('ext', config) -} - -/** - * Get the arch module paths. - * - * @param config - Optional configuration object (uses loaded config if not provided) - * @returns The arch module paths - */ -export function getArchPaths(config?: TnmscConfig): ModulePaths { - return getAindexModulePaths('arch', config) -} - -/** - * Check if the configuration has been loaded. - * - * @returns True if configuration is loaded - */ -export function isConfigLoaded(): boolean { - return ConfigService.getInstance().isLoaded() -} - -/** - * Reload the configuration from disk. - * - * @returns The reloaded configuration - */ -export function reloadConfig(): TnmscConfig { - return ConfigService.getInstance().reload() -} diff --git a/cli/src/config/errors.ts b/cli/src/config/errors.ts deleted file mode 100644 index ca408f69..00000000 --- a/cli/src/config/errors.ts +++ /dev/null @@ -1,164 +0,0 @@ -/** - * Error classes for the TNMSC configuration system. - * - * This module provides specific error types for different configuration - * failure scenarios, enabling better error handling and user feedback. - */ - -/** - * Base error class for all configuration-related errors. - */ -export class ConfigError extends Error { - readonly configPath: string | undefined - - constructor(message: string, configPath?: string) { - super(message) - this.name = 'ConfigError' - this.configPath = configPath ?? void 0 - - if (Error.captureStackTrace !== void 0 && Error.captureStackTrace !== null) { // Maintain proper stack trace in V8 environments - Error.captureStackTrace(this, ConfigError) - } - } - - override toString(): string { - const pathInfo = this.configPath !== void 0 && this.configPath !== null && this.configPath.length > 0 ? 
` (${this.configPath})` : '' - return `${this.name}${pathInfo}: ${this.message}` - } -} - -/** - * Error thrown when the configuration file cannot be found. - */ -export class ConfigFileNotFoundError extends ConfigError { - constructor(configPath: string) { - super(`Configuration file not found: ${configPath}`, configPath) - this.name = 'ConfigFileNotFoundError' - - if (Error.captureStackTrace !== void 0 && Error.captureStackTrace !== null) Error.captureStackTrace(this, ConfigFileNotFoundError) - } -} - -/** - * Error thrown when the configuration file contains invalid JSON. - */ -export class ConfigParseError extends ConfigError { - readonly syntaxError: SyntaxError - - constructor(configPath: string, syntaxError: SyntaxError) { - super(`Invalid JSON in configuration file: ${syntaxError.message}`, configPath) - this.name = 'ConfigParseError' - this.syntaxError = syntaxError - - if (Error.captureStackTrace !== void 0 && Error.captureStackTrace !== null) Error.captureStackTrace(this, ConfigParseError) - } -} - -/** - * Error thrown when the configuration fails schema validation. - */ -export class ConfigValidationError extends ConfigError { - readonly validationErrors: readonly string[] - - constructor(configPath: string, validationErrors: string[]) { - const errorList = validationErrors.join('; ') - super(`Configuration validation failed: ${errorList}`, configPath) - this.name = 'ConfigValidationError' - this.validationErrors = validationErrors - - if (Error.captureStackTrace !== void 0 && Error.captureStackTrace !== null) Error.captureStackTrace(this, ConfigValidationError) - } - - get formattedErrors(): string { - return this.validationErrors.map((err, i) => ` ${i + 1}. ${err}`).join('\n') - } - - override toString(): string { - const pathInfo = this.configPath !== void 0 && this.configPath !== null && this.configPath.length > 0 ? 
` (${this.configPath})` : '' - return `${this.name}${pathInfo}:\n${this.formattedErrors}` - } -} - -/** - * Error thrown when path resolution fails. - */ -export class ConfigPathError extends ConfigError { - readonly path: string - - constructor(configPath: string, path: string, reason: string) { - super(`Path resolution failed for "${path}": ${reason}`, configPath) - this.name = 'ConfigPathError' - this.path = path - - if (Error.captureStackTrace !== void 0 && Error.captureStackTrace !== null) Error.captureStackTrace(this, ConfigPathError) - } -} - -/** - * Error thrown when the configuration file cannot be read due to permissions. - */ -export class ConfigPermissionError extends ConfigError { - readonly originalError: Error - - constructor(configPath: string, originalError: Error) { - super(`Cannot read configuration file: ${originalError.message}`, configPath) - this.name = 'ConfigPermissionError' - this.originalError = originalError - - if (Error.captureStackTrace !== void 0 && Error.captureStackTrace !== null) Error.captureStackTrace(this, ConfigPermissionError) - } -} - -/** - * Type guard to check if an error is a ConfigError. - * - * @param error - The error to check - * @returns True if the error is a ConfigError - */ -export function isConfigError(error: unknown): error is ConfigError { - return error instanceof ConfigError -} - -/** - * Type guard to check if an error is a ConfigFileNotFoundError. - * - * @param error - The error to check - * @returns True if the error is a ConfigFileNotFoundError - */ -export function isConfigFileNotFoundError(error: unknown): error is ConfigFileNotFoundError { - return error instanceof ConfigFileNotFoundError -} - -/** - * Type guard to check if an error is a ConfigParseError. 
- * - * @param error - The error to check - * @returns True if the error is a ConfigParseError - */ -export function isConfigParseError(error: unknown): error is ConfigParseError { - return error instanceof ConfigParseError -} - -/** - * Type guard to check if an error is a ConfigValidationError. - * - * @param error - The error to check - * @returns True if the error is a ConfigValidationError - */ -export function isConfigValidationError(error: unknown): error is ConfigValidationError { - return error instanceof ConfigValidationError -} - -/** - * Format any error into a user-friendly message. - * - * @param error - The error to format - * @returns A formatted error message - */ -export function formatConfigError(error: unknown): string { - if (isConfigError(error)) return error.toString() - - if (error instanceof Error) return `Error: ${error.message}` - - return `Unknown error: ${String(error)}` -} diff --git a/cli/src/config/example.json b/cli/src/config/example.json index 2bd83bdb..a78e2e89 100644 --- a/cli/src/config/example.json +++ b/cli/src/config/example.json @@ -20,11 +20,11 @@ "dist": "dist/rules" }, "globalPrompt": { - "src": "app/global.cn.mdx", + "src": "app/global.src.mdx", "dist": "dist/global.mdx" }, "workspacePrompt": { - "src": "app/workspace.cn.mdx", + "src": "app/workspace.src.mdx", "dist": "dist/workspace.mdx" }, "app": { diff --git a/cli/src/config/index.ts b/cli/src/config/index.ts deleted file mode 100644 index 1f6a8e8e..00000000 --- a/cli/src/config/index.ts +++ /dev/null @@ -1,85 +0,0 @@ -export { // Export accessor functions - getAbsoluteWorkspaceDirPath, - getAindexConfig, - getAllResolvedAindexPaths, - getAppPaths, - getArchPaths, - getCommandsPaths, - getConfig, - getExtPaths, - getGlobalPromptPaths, - getLogLevel, - getAindexModulePaths as getModulePaths, - getProfile, - getResolvedAindexModulePaths, - getRulesPaths, - getSkillsPaths, - getSubAgentsPaths, - getVersion, - getWorkspaceDir, - getWorkspacePromptPaths, - isConfigLoaded, 
- reloadConfig -} from './accessors' - -export { // Export configuration service - ConfigService, - DEFAULT_CONFIG_FILE_NAME, - DEFAULT_GLOBAL_CONFIG_DIR, - getConfigService, - getDefaultConfigPath, - loadConfig, - safeLoadConfig -} from './ConfigService' - -export { // Export error classes - ConfigError, - ConfigFileNotFoundError, - ConfigParseError, - ConfigPathError, - ConfigPermissionError, - ConfigValidationError, - formatConfigError, - isConfigError, - isConfigFileNotFoundError, - isConfigParseError, - isConfigValidationError -} from './errors' - -export { // Export path resolution utilities - clearPathCache, - expandHomeDir, - getAbsoluteDistPath, - getAbsoluteSrcPath, - getAbsoluteWorkspaceDir, - getAindexModulePaths, - getRelativePath, - isAbsolutePath, - joinPath, - normalizePath, - resolveAllAindexPaths, - resolveModulePaths, - resolveWorkspacePath -} from './pathResolver' - -export { // Export schema and validation - formatValidationErrors, - isValidLogLevel, - safeValidateConfig, - validateConfig, - ZAindexConfig, - ZModulePaths, - ZProfile, - ZTnmscConfig -} from './schema' - -export type { // Export types - AindexConfig, - ConfigLoadResult, - ConfigServiceOptions, - LogLevel, - ModulePaths, - Profile, - ResolvedModulePaths, - TnmscConfig -} from './types' diff --git a/cli/src/config/pathResolver.ts b/cli/src/config/pathResolver.ts deleted file mode 100644 index 8fe443ef..00000000 --- a/cli/src/config/pathResolver.ts +++ /dev/null @@ -1,232 +0,0 @@ -/** - * Path resolution utilities for the TNMSC configuration system. - * - * This module provides functions for resolving paths relative to the - * workspace directory, expanding home directory shortcuts, and caching - * resolved paths for performance. 
- */ - -import type {ModulePaths, ResolvedModulePaths, TnmscConfig} from './types' -import * as os from 'node:os' -import * as path from 'node:path' -import {ConfigPathError} from './errors' - -/** - * Cache for resolved paths to avoid redundant computations. - */ -const pathCache = new Map() - -/** - * Clear the path cache. - * This should be called when the configuration is reloaded. - */ -export function clearPathCache(): void { - pathCache.clear() -} - -/** - * Get the cache key for a path resolution. - */ -function getCacheKey(workspaceDir: string, relativePath: string): string { - return `${workspaceDir}::${relativePath}` -} - -/** - * Expand the tilde (~) in a path to the user's home directory. - * - * @param inputPath - The path that may contain a tilde - * @returns The path with tilde expanded to the home directory - */ -export function expandHomeDir(inputPath: string): string { - if (!inputPath.startsWith('~')) return inputPath - - const homeDir = os.homedir() - - if (inputPath === '~') return homeDir - - if (inputPath.startsWith('~/') || inputPath.startsWith('~\\')) return path.join(homeDir, inputPath.slice(2)) - - return inputPath // Handle ~username syntax (not supported, return as-is) -} - -/** - * Resolve a path relative to the workspace directory. - * - * @param workspaceDir - The workspace directory (may contain ~) - * @param relativePath - The path relative to the workspace - * @param useCache - Whether to use the path cache - * @returns The absolute resolved path - * @throws {ConfigPathError} If path resolution fails - */ -export function resolveWorkspacePath( - workspaceDir: string, - relativePath: string, - useCache = true -): string { - const cacheKey = getCacheKey(workspaceDir, relativePath) - - if (useCache && pathCache.has(cacheKey)) return pathCache.get(cacheKey)! 
- - try { - const expandedWorkspace = expandHomeDir(workspaceDir) - const resolvedPath = path.resolve(expandedWorkspace, relativePath) - - if (useCache) pathCache.set(cacheKey, resolvedPath) - - return resolvedPath - } - catch (error) { - const reason = error instanceof Error ? error.message : String(error) - throw new ConfigPathError(workspaceDir, relativePath, reason) - } -} - -/** - * Get the absolute path for a module's source directory. - * - * @param config - The TNMSC configuration - * @param modulePath - The module paths (src/dist pair) - * @returns The absolute source path - */ -export function getAbsoluteSrcPath(config: TnmscConfig, modulePath: ModulePaths): string { - return resolveWorkspacePath(config.workspaceDir, modulePath.src) -} - -/** - * Get the absolute path for a module's distribution directory. - * - * @param config - The TNMSC configuration - * @param modulePath - The module paths (src/dist pair) - * @returns The absolute distribution path - */ -export function getAbsoluteDistPath(config: TnmscConfig, modulePath: ModulePaths): string { - return resolveWorkspacePath(config.workspaceDir, modulePath.dist) -} - -/** - * Get both absolute and relative paths for a module. - * - * @param config - The TNMSC configuration - * @param modulePath - The module paths (src/dist pair) - * @returns Resolved paths with both absolute and relative variants - */ -export function resolveModulePaths( - config: TnmscConfig, - modulePath: ModulePaths -): ResolvedModulePaths { - return { - absoluteSrc: getAbsoluteSrcPath(config, modulePath), - absoluteDist: getAbsoluteDistPath(config, modulePath), - relativeSrc: modulePath.src, - relativeDist: modulePath.dist - } -} - -/** - * Get the absolute workspace directory path. 
- * - * @param workspaceDir - The workspace directory (may contain ~) - * @returns The absolute workspace directory path - */ -export function getAbsoluteWorkspaceDir(workspaceDir: string): string { - return expandHomeDir(workspaceDir) -} - -/** - * Get the relative path from the workspace directory. - * - * @param workspaceDir - The workspace directory (may contain ~) - * @param absolutePath - The absolute path to make relative - * @returns The relative path from workspace - */ -export function getRelativePath(workspaceDir: string, absolutePath: string): string { - const expandedWorkspace = expandHomeDir(workspaceDir) - return path.relative(expandedWorkspace, absolutePath) -} - -/** - * Check if a path is absolute. - * - * @param inputPath - The path to check - * @returns True if the path is absolute - */ -export function isAbsolutePath(inputPath: string): boolean { - return path.isAbsolute(inputPath) -} - -/** - * Normalize a path for the current platform. - * - * @param inputPath - The path to normalize - * @returns The normalized path - */ -export function normalizePath(inputPath: string): string { - return path.normalize(inputPath) -} - -/** - * Join multiple path segments. - * - * @param segments - The path segments to join - * @returns The joined path - */ -export function joinPath(...segments: string[]): string { - return path.join(...segments) -} - -/** - * Get all resolved paths for the aindex configuration. 
- * - * @param config - The TNMSC configuration - * @returns Object with all module paths resolved - */ -export function resolveAllAindexPaths(config: TnmscConfig): { - skills: ResolvedModulePaths - commands: ResolvedModulePaths - subAgents: ResolvedModulePaths - rules: ResolvedModulePaths - globalPrompt: ResolvedModulePaths - workspacePrompt: ResolvedModulePaths - app: ResolvedModulePaths - ext: ResolvedModulePaths - arch: ResolvedModulePaths -} { - const {aindex} = config - - return { - skills: resolveModulePaths(config, aindex.skills), - commands: resolveModulePaths(config, aindex.commands), - subAgents: resolveModulePaths(config, aindex.subAgents), - rules: resolveModulePaths(config, aindex.rules), - globalPrompt: resolveModulePaths(config, aindex.globalPrompt), - workspacePrompt: resolveModulePaths(config, aindex.workspacePrompt), - app: resolveModulePaths(config, aindex.app), - ext: resolveModulePaths(config, aindex.ext), - arch: resolveModulePaths(config, aindex.arch) - } -} - -/** - * Get a specific aindex module's resolved paths. - * - * @param config - The TNMSC configuration - * @param moduleName - The name of the module - * @returns The resolved module paths - * @throws {ConfigPathError} If the module name is invalid - */ -export function getAindexModulePaths( - config: TnmscConfig, - moduleName: keyof TnmscConfig['aindex'] & string -): ResolvedModulePaths { - const modulePaths = config.aindex[moduleName] - - if (modulePaths === void 0 || modulePaths === null || typeof modulePaths !== 'object' || !('src' in modulePaths)) { - throw new ConfigPathError( - config.workspaceDir, - moduleName, - `Invalid aindex module: ${moduleName}` - ) - } - - return resolveModulePaths(config, modulePaths) -} diff --git a/cli/src/config/schema.ts b/cli/src/config/schema.ts deleted file mode 100644 index 09e0e84e..00000000 --- a/cli/src/config/schema.ts +++ /dev/null @@ -1,127 +0,0 @@ -/** - * Zod validation schemas for the TNMSC configuration system. 
- * - * This module provides runtime validation for configuration files, - * ensuring all required fields exist and have valid formats. - */ - -import type { - LogLevel, - ModulePaths, - Profile, - TnmscConfig -} from './types' -import {z} from 'zod/v3' - -const VERSION_REGEX = /^\d{4}\.\d{5}\.\d{5}$/ - -const BIRTHDAY_REGEX = /^\d{4}-\d{2}-\d{2}$/ - -/** - * Valid log level values. - */ -const VALID_LOG_LEVELS: Set = new Set(['trace', 'debug', 'info', 'warn', 'error']) - -/** - * Zod schema for module path pairs (src/dist). - */ -export const ZModulePaths = z.object({ - src: z.string().min(1, 'Source path cannot be empty'), - dist: z.string().min(1, 'Distribution path cannot be empty') -}) satisfies z.ZodType - -/** - * Zod schema for aindex configuration. - * Supports user-defined module paths with src/dist structure. - */ -export const ZAindexConfig = z.object({ - dir: z.string().default('aindex'), - skills: ZModulePaths, - commands: ZModulePaths, - subAgents: ZModulePaths, - rules: ZModulePaths, - globalPrompt: ZModulePaths, - workspacePrompt: ZModulePaths, - app: ZModulePaths, - ext: ZModulePaths, - arch: ZModulePaths -}).catchall(z.union([ZModulePaths, z.string()])) - -/** - * Zod schema for user profile. - */ -export const ZProfile = z.object({ - name: z.string().min(1, 'Profile name cannot be empty'), - username: z.string().min(1, 'Username cannot be empty'), - gender: z.string().min(1, 'Gender cannot be empty'), - birthday: z.string() - .regex(BIRTHDAY_REGEX, 'Birthday must be in YYYY-MM-DD format') -}) satisfies z.ZodType - -/** - * Zod schema for the main TNMSC configuration. 
- */ -export const ZTnmscConfig = z.object({ - version: z.string() - .regex(VERSION_REGEX, 'Version must be in YYYY.MMDD.HHMM format'), - workspaceDir: z.string().min(1, 'Workspace directory cannot be empty'), - aindex: ZAindexConfig, - logLevel: z.enum(['trace', 'debug', 'info', 'warn', 'error']), - profile: ZProfile -}) - -/** - * Validate a configuration object against the schema. - * - * @param config - The configuration object to validate - * @returns The validated configuration - * @throws {z.ZodError} If validation fails - */ -export function validateConfig(config: unknown): TnmscConfig { - return ZTnmscConfig.parse(config) -} - -/** - * Safely validate a configuration object against the schema. - * - * @param config - The configuration object to validate - * @returns An object with success flag and either data or error - */ -export function safeValidateConfig(config: unknown): - | {success: true, data: TnmscConfig} - | {success: false, error: z.ZodError} { - const result = ZTnmscConfig.safeParse(config) - if (result.success) return {success: true, data: result.data} - return {success: false, error: result.error} -} - -/** - * Format validation errors into human-readable messages. - * - * @param error - The Zod error to format - * @returns Array of error message strings - */ -export function formatValidationErrors(error: z.ZodError): string[] { - return error.issues.map(issue => { - const path = issue.path.length > 0 ? issue.path.join('.') : 'root' - return `${path}: ${issue.message}` - }) -} - -/** - * Check if a value is a valid log level. 
- * - * @param value - The value to check - * @returns True if the value is a valid log level - */ -export function isValidLogLevel(value: unknown): value is LogLevel { - return typeof value === 'string' && VALID_LOG_LEVELS.has(value as LogLevel) -} - -export { // Re-export types for convenience - type AindexConfig, - type LogLevel, - type ModulePaths, - type Profile, - type TnmscConfig -} from './types' diff --git a/cli/src/config/types.ts b/cli/src/config/types.ts deleted file mode 100644 index 1b3dab60..00000000 --- a/cli/src/config/types.ts +++ /dev/null @@ -1,117 +0,0 @@ -/** - * Configuration types for the TNMSC configuration system. - * - * This module defines TypeScript interfaces that match the exact JSON - * configuration structure located at ~/.aindex/.tnmsc.json - */ - -/** - * Module path pair containing source and distribution paths. - * Both paths are relative to the workspace directory. - */ -export interface ModulePaths { - /** Source path (human-authored files) */ - readonly src: string - /** Output/compiled path (read by the system) */ - readonly dist: string -} - -/** - * Aindex configuration containing all module paths. - * This replaces the previous shadowSourceProject configuration. - * Supports user-defined module paths with src/dist structure. 
- */ -export interface AindexConfig { - /** Aindex directory name (relative to workspaceDir), default: 'aindex' */ - readonly dir: string - /** Skills module paths */ - readonly skills: ModulePaths - /** Commands module paths */ - readonly commands: ModulePaths - /** Sub-agents module paths */ - readonly subAgents: ModulePaths - /** Rules module paths */ - readonly rules: ModulePaths - /** Global prompt file paths */ - readonly globalPrompt: ModulePaths - /** Workspace prompt file paths */ - readonly workspacePrompt: ModulePaths - /** Application module paths */ - readonly app: ModulePaths - /** Extension module paths */ - readonly ext: ModulePaths - /** Architecture module paths */ - readonly arch: ModulePaths - /** User-defined module paths - allows any additional module configuration */ - readonly [key: string]: ModulePaths | string -} - -/** - * User profile information. - */ -export interface Profile { - /** Display name of the user */ - readonly name: string - /** Username/login identifier */ - readonly username: string - /** Gender of the user */ - readonly gender: string - readonly birthday: string -} - -/** - * Log level options for the application. - */ -export type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error' - -/** - * Main TNMSC configuration interface. - * This matches the structure of ~/.aindex/.tnmsc.json - */ -export interface TnmscConfig { - readonly version: string - /** Workspace directory path (supports ~ for home directory) */ - readonly workspaceDir: string - /** Aindex module configuration */ - readonly aindex: AindexConfig - /** Log level setting */ - readonly logLevel: LogLevel - /** User profile information */ - readonly profile: Profile -} - -/** - * Configuration load result containing the config and metadata. 
- */ -export interface ConfigLoadResult { - /** The loaded configuration */ - readonly config: TnmscConfig - /** Path to the configuration file */ - readonly source: string - /** Whether the configuration was found and loaded */ - readonly found: boolean -} - -/** - * Configuration service options. - */ -export interface ConfigServiceOptions { - /** Custom path to the configuration file */ - readonly configPath?: string - /** Whether to cache the configuration after loading */ - readonly enableCache?: boolean -} - -/** - * Resolved paths for an aindex module. - */ -export interface ResolvedModulePaths { - /** Absolute source path */ - readonly absoluteSrc: string - /** Absolute distribution path */ - readonly absoluteDist: string - /** Source path relative to workspace */ - readonly relativeSrc: string - /** Distribution path relative to workspace */ - readonly relativeDist: string -} diff --git a/cli/src/constants.ts b/cli/src/constants.ts deleted file mode 100644 index 53879ec7..00000000 --- a/cli/src/constants.ts +++ /dev/null @@ -1,9 +0,0 @@ -import type {UserConfigFile} from './plugins/plugin-shared' - -export const PathPlaceholders = { - USER_HOME: '~', - WORKSPACE: '$WORKSPACE' -} as const - -type DefaultUserConfig = Readonly>> -export const DEFAULT_USER_CONFIG = {} as DefaultUserConfig diff --git a/cli/src/core/config/mod.rs b/cli/src/core/config/mod.rs index 31f83f43..ebce074f 100644 --- a/cli/src/core/config/mod.rs +++ b/cli/src/core/config/mod.rs @@ -78,10 +78,6 @@ pub struct AindexConfig { pub arch: Option, } -/// Shadow source project configuration (deprecated, use AindexConfig). -#[deprecated(since = "2026.10303.0", note = "Use AindexConfig instead")] -pub type ShadowSourceProjectConfig = AindexConfig; - /// Per-plugin fast command series override options. 
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] #[serde(rename_all = "camelCase")] @@ -219,16 +215,6 @@ fn merge_aindex( } } -/// Merge aindex configs (deprecated, use merge_aindex). -#[deprecated(since = "2026.10303.0", note = "Use merge_aindex instead")] -#[allow(dead_code)] -fn merge_shadow_source_project( - a: &Option, - b: &Option, -) -> Option { - merge_aindex(a, b) -} - /// Merge two configs. `over` fields take priority over `base`. pub fn merge_configs_pair(base: &UserConfigFile, over: &UserConfigFile) -> UserConfigFile { let merged_aindex = merge_aindex( @@ -602,18 +588,10 @@ pub fn ensure_aindex_config_link(aindex_dir: &str, logger: &Logger) { ensure_config_link(&config_path, &global_path, logger); } -/// Ensure the shadow source project directory has a `.tnmsc.json` symlink -/// pointing to the global config (deprecated, use ensure_aindex_config_link). -#[deprecated(since = "2026.10303.0", note = "Use ensure_aindex_config_link instead")] -#[allow(dead_code)] -pub fn ensure_shadow_project_config_link(shadow_project_dir: &str, logger: &Logger) { - ensure_aindex_config_link(shadow_project_dir, logger); -} - /// Validate global config file strictly. 
/// /// - If config doesn't exist: create default config, log warn, continue -/// - If config is invalid: delete and recreate, log error, return should_exit=true +/// - If config is invalid: preserve the file, log error, return should_exit=true pub fn validate_and_ensure_global_config( default_config: &UserConfigFile, ) -> GlobalConfigValidationResult { @@ -646,7 +624,7 @@ pub fn validate_and_ensure_global_config( "error": e.to_string() })), ); - return recreate_config_and_exit(&config_path, default_config, &logger, vec![msg]); + return preserve_invalid_config_and_exit(&config_path, &logger, vec![msg]); } }; @@ -662,7 +640,7 @@ pub fn validate_and_ensure_global_config( "error": e.to_string() })), ); - return recreate_config_and_exit(&config_path, default_config, &logger, vec![msg]); + return preserve_invalid_config_and_exit(&config_path, &logger, vec![msg]); } }; @@ -672,12 +650,7 @@ pub fn validate_and_ensure_global_config( Value::String("global config must be a JSON object".into()), Some(serde_json::json!({"path": config_path.to_string_lossy()})), ); - return recreate_config_and_exit( - &config_path, - default_config, - &logger, - vec!["Config must be a JSON object".into()], - ); + return preserve_invalid_config_and_exit(&config_path, &logger, vec!["Config must be a JSON object".into()]); } // Try to deserialize @@ -690,7 +663,7 @@ pub fn validate_and_ensure_global_config( "error": e.to_string() })), ); - return recreate_config_and_exit(&config_path, default_config, &logger, vec![msg]); + return preserve_invalid_config_and_exit(&config_path, &logger, vec![msg]); } GlobalConfigValidationResult { @@ -701,27 +674,13 @@ pub fn validate_and_ensure_global_config( } } -fn recreate_config_and_exit( +fn preserve_invalid_config_and_exit( config_path: &Path, - default_config: &UserConfigFile, logger: &Logger, errors: Vec, ) -> GlobalConfigValidationResult { - if let Err(_) = fs::remove_file(config_path) { - logger.warn( - Value::String("failed to delete invalid 
config".into()), - Some(serde_json::json!({"path": config_path.to_string_lossy()})), - ); - } else { - logger.info( - Value::String("deleted invalid config".into()), - Some(serde_json::json!({"path": config_path.to_string_lossy()})), - ); - } - - write_config(config_path, default_config, logger); logger.error( - Value::String("recreated default config, please review and restart".into()), + Value::String("invalid global config preserved, please fix it manually and restart".into()), Some(serde_json::json!({"path": config_path.to_string_lossy()})), ); @@ -740,6 +699,7 @@ fn recreate_config_and_exit( #[cfg(test)] mod tests { use super::*; + use tempfile::TempDir; #[test] fn test_resolve_tilde() { @@ -783,8 +743,8 @@ mod tests { "commands": {"src": "src/commands", "dist": "dist/commands"}, "subAgents": {"src": "src/agents", "dist": "dist/agents"}, "rules": {"src": "src/rules", "dist": "dist/rules"}, - "globalPrompt": {"src": "app/global.cn.mdx", "dist": "dist/global.mdx"}, - "workspacePrompt": {"src": "app/workspace.cn.mdx", "dist": "dist/app/workspace.mdx"}, + "globalPrompt": {"src": "app/global.src.mdx", "dist": "dist/global.mdx"}, + "workspacePrompt": {"src": "app/workspace.src.mdx", "dist": "dist/app/workspace.mdx"}, "app": {"src": "app", "dist": "dist/app"}, "ext": {"src": "ext", "dist": "dist/ext"}, "arch": {"src": "arch", "dist": "dist/arch"} @@ -974,6 +934,34 @@ mod tests { assert!(path_str.contains(DEFAULT_GLOBAL_CONFIG_DIR)); assert!(path_str.contains(DEFAULT_CONFIG_FILE_NAME)); } + + #[test] + fn test_preserve_invalid_config_and_exit_keeps_original_file() { + let temp_dir = match TempDir::new() { + Ok(value) => value, + Err(error) => panic!("failed to create temp dir: {error}"), + }; + let config_path = temp_dir.path().join(DEFAULT_CONFIG_FILE_NAME); + let invalid_content = "{invalid-json"; + + if let Err(error) = fs::write(&config_path, invalid_content) { + panic!("failed to write invalid config fixture: {error}"); + } + + let logger = 
create_logger("ConfigLoaderTest", None); + let result = preserve_invalid_config_and_exit(&config_path, &logger, vec!["Invalid JSON".into()]); + + assert!(!result.valid); + assert!(result.exists); + assert!(result.should_exit); + assert_eq!(result.errors, vec!["Invalid JSON".to_string()]); + + let retained = match fs::read_to_string(&config_path) { + Ok(value) => value, + Err(error) => panic!("failed to read retained config: {error}"), + }; + assert_eq!(retained, invalid_content); + } } diff --git a/cli/src/core/plugin_shared.rs b/cli/src/core/plugin_shared.rs index 69c739cc..8b96fe46 100644 --- a/cli/src/core/plugin_shared.rs +++ b/cli/src/core/plugin_shared.rs @@ -74,19 +74,6 @@ pub enum SkillResourceEncoding { Base64, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum SkillResourceCategory { - Code, - Data, - Document, - Config, - Script, - Image, - Binary, - Other, -} - // --------------------------------------------------------------------------- // Path types // --------------------------------------------------------------------------- @@ -351,7 +338,6 @@ pub struct SkillResource { pub relative_path: String, pub content: String, pub encoding: SkillResourceEncoding, - pub category: SkillResourceCategory, pub length: usize, #[serde(default, skip_serializing_if = "Option::is_none")] pub mime_type: Option, diff --git a/cli/src/index.ts b/cli/src/index.ts index e20b96da..ddd62ec3 100644 --- a/cli/src/index.ts +++ b/cli/src/index.ts @@ -1,24 +1,50 @@ import process from 'node:process' import {PluginPipeline} from '@/PluginPipeline' -import userPluginConfigPromise from './plugin.config' -import {createLogger} from './plugins/plugin-shared' +import {createLogger} from './plugins/plugin-core' export * from './Aindex' export * from './config' export * from './ConfigLoader' -export * from './constants' + export { default } from './plugin.config' async function main(): Promise { - const 
userPluginConfig = await userPluginConfigPromise const pipeline = new PluginPipeline(...process.argv) - await pipeline.run(userPluginConfig) + const {default: userPluginConfigPromise} = await import('./plugin.config') + const userPluginConfig = await userPluginConfigPromise + const result = await pipeline.run(userPluginConfig) + if (!result.success) process.exit(1) +} + +function isJsonMode(argv: readonly string[]): boolean { + return argv.some(arg => arg === '--json' || arg === '-j' || /^-[^-]*j/.test(arg)) +} + +function writeJsonFailure(errorMessage: string): void { + process.stdout.write(`${JSON.stringify({ + success: false, + filesAffected: 0, + dirsAffected: 0, + message: errorMessage, + pluginResults: [], + errors: [errorMessage] + })}\n`) } main().catch((e: unknown) => { + const errorMessage = e instanceof Error ? e.message : String(e) + if (isJsonMode(process.argv)) { + writeJsonFailure(errorMessage) + process.exit(1) + } const logger = createLogger('main', 'error') - logger.error('unhandled error', {error: e instanceof Error ? 
e.message : String(e)}) + logger.error('unhandled error', {error: errorMessage}) process.exit(1) }) + +export { + DEFAULT_USER_CONFIG, + PathPlaceholders +} from './plugins/plugin-core' diff --git a/cli/src/inputs/effect-md-cleanup.ts b/cli/src/inputs/effect-md-cleanup.ts index 9270db34..9c0eb9d4 100644 --- a/cli/src/inputs/effect-md-cleanup.ts +++ b/cli/src/inputs/effect-md-cleanup.ts @@ -1,10 +1,10 @@ import type { - CollectedInputContext, + InputCollectedContext, InputEffectContext, InputEffectResult, InputPluginContext -} from '../plugins/plugin-shared' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' +} from '../plugins/plugin-core' +import {AbstractInputPlugin} from '../plugins/plugin-core' export interface WhitespaceCleanupEffectResult extends InputEffectResult { readonly modifiedFiles: string[] @@ -142,7 +142,7 @@ export class MarkdownWhitespaceCleanupEffectInputPlugin extends AbstractInputPlu return '\n' } - collect(_ctx: InputPluginContext): Partial { + collect(_ctx: InputPluginContext): Partial { return {} } } diff --git a/cli/src/inputs/effect-orphan-cleanup.test.ts b/cli/src/inputs/effect-orphan-cleanup.test.ts new file mode 100644 index 00000000..eb024c2e --- /dev/null +++ b/cli/src/inputs/effect-orphan-cleanup.test.ts @@ -0,0 +1,140 @@ +import type {InputPluginContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import {OrphanFileCleanupEffectInputPlugin} from './effect-orphan-cleanup' + +const legacySourceExtension = '.cn.mdx' + +function createContext(tempWorkspace: string): InputPluginContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger: createLogger('OrphanFileCleanupEffectInputPluginTest', 'error'), + fs, + path, + glob, + userConfigOptions: 
options, + dependencyContext: {} + } as InputPluginContext +} + +class TestOrphanFileCleanupEffectInputPlugin extends OrphanFileCleanupEffectInputPlugin { + constructor(private readonly planFactory: (ctx: ReturnType & {readonly fs: typeof fs, readonly path: typeof path}) => { + filesToDelete: string[] + dirsToDelete: string[] + errors: {path: string, error: Error}[] + }) { + super() + } + + protected override buildDeletionPlan(ctx: Parameters[0]): { + filesToDelete: string[] + dirsToDelete: string[] + errors: {path: string, error: Error}[] + } { + const basePaths = this.resolveBasePaths(ctx.userConfigOptions) + return this.planFactory({...basePaths, fs: ctx.fs, path: ctx.path}) + } +} + +describe('orphan file cleanup effect', () => { + it('keeps dist command files when a matching .src.mdx source exists', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-test-')) + const srcDir = path.join(tempWorkspace, 'aindex', 'commands') + const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'commands') + const distFile = path.join(distDir, 'demo.mdx') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync(path.join(srcDir, 'demo.src.mdx'), '---\ndescription: source\n---\nSource prompt', 'utf8') + fs.writeFileSync(distFile, 'Compiled prompt', 'utf8') + + const plugin = new OrphanFileCleanupEffectInputPlugin() + const [result] = await plugin.executeEffects(createContext(tempWorkspace)) + + expect(result?.success).toBe(true) + expect(fs.existsSync(distFile)).toBe(true) + expect(result?.deletedFiles ?? 
[]).toHaveLength(0) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('blocks deleting dist command mdx files when only a legacy cn source remains', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-legacy-test-')) + const srcDir = path.join(tempWorkspace, 'aindex', 'commands') + const distDir = path.join(tempWorkspace, 'aindex', 'dist', 'commands') + const distFile = path.join(distDir, 'demo.mdx') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync(path.join(srcDir, `demo${legacySourceExtension}`), '---\ndescription: legacy\n---\nLegacy prompt', 'utf8') + fs.writeFileSync(distFile, 'Compiled prompt', 'utf8') + + const plugin = new OrphanFileCleanupEffectInputPlugin() + await expect(plugin.executeEffects(createContext(tempWorkspace))).rejects.toThrow('Protected deletion guard blocked orphan-file-cleanup') + expect(fs.existsSync(distFile)).toBe(true) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('fails when an orphan cleanup candidate hits an exact protected path', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-guard-exact-')) + const safeDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'commands', 'safe.mdx') + const globalConfigPath = path.join(os.homedir(), '.aindex', '.tnmsc.json') + + try { + fs.mkdirSync(path.dirname(safeDistFile), {recursive: true}) + fs.writeFileSync(safeDistFile, 'Compiled prompt', 'utf8') + + const plugin = new TestOrphanFileCleanupEffectInputPlugin(() => ({ + filesToDelete: [safeDistFile, globalConfigPath], + dirsToDelete: [], + errors: [] + })) + + await expect(plugin.executeEffects(createContext(tempWorkspace))).rejects.toThrow('Protected deletion guard blocked orphan-file-cleanup') + expect(fs.existsSync(safeDistFile)).toBe(true) + } + finally { + fs.rmSync(tempWorkspace, {recursive: 
true, force: true}) + } + }) + + it('fails without partial deletion when safe and subtree-protected candidates are mixed', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-guard-subtree-')) + const srcDir = path.join(tempWorkspace, 'aindex', 'commands') + const protectedSourceFile = path.join(srcDir, 'demo.src.mdx') + const safeDistFile = path.join(tempWorkspace, 'aindex', 'dist', 'commands', 'safe.mdx') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(path.dirname(safeDistFile), {recursive: true}) + fs.writeFileSync(protectedSourceFile, '---\ndescription: source\n---\nSource prompt', 'utf8') + fs.writeFileSync(safeDistFile, 'Compiled prompt', 'utf8') + + const plugin = new TestOrphanFileCleanupEffectInputPlugin(() => ({ + filesToDelete: [safeDistFile, protectedSourceFile], + dirsToDelete: [], + errors: [] + })) + + await expect(plugin.executeEffects(createContext(tempWorkspace))).rejects.toThrow('Protected deletion guard blocked orphan-file-cleanup') + expect(fs.existsSync(safeDistFile)).toBe(true) + expect(fs.existsSync(protectedSourceFile)).toBe(true) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/inputs/effect-orphan-cleanup.ts b/cli/src/inputs/effect-orphan-cleanup.ts index 7b9510fa..34168857 100644 --- a/cli/src/inputs/effect-orphan-cleanup.ts +++ b/cli/src/inputs/effect-orphan-cleanup.ts @@ -1,75 +1,169 @@ -import type {CollectedInputContext, InputEffectContext, InputEffectResult, InputPluginContext} from '../plugins/plugin-shared' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' +import type {InputCollectedContext, InputEffectContext, InputEffectResult, InputPluginContext} from '../plugins/plugin-core' +import {AbstractInputPlugin, SourcePromptFileExtensions} from '../plugins/plugin-core' +import { + collectConfiguredAindexInputRules, + createProtectedDeletionGuard, + partitionDeletionTargets, + 
ProtectedDeletionGuardError +} from '../ProtectedDeletionGuard' export interface OrphanCleanupEffectResult extends InputEffectResult { readonly deletedFiles: string[] readonly deletedDirs: string[] } +const OrphanCleanupDistSubDirs = ['skills', 'commands', 'agents', 'app'] as const + +type OrphanCleanupSubDir = (typeof OrphanCleanupDistSubDirs)[number] + +type OrphanCleanupSourcePaths = Readonly> + +interface OrphanCleanupPlan { + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly errors: readonly {path: string, error: Error}[] +} + export class OrphanFileCleanupEffectInputPlugin extends AbstractInputPlugin { constructor() { super('OrphanFileCleanupEffectInputPlugin') this.registerEffect('orphan-file-cleanup', this.cleanupOrphanFiles.bind(this), 20) } - private async cleanupOrphanFiles(ctx: InputEffectContext): Promise { - const {fs, path, aindexDir, dryRun, logger, userConfigOptions} = ctx - - const distDir = path.join(aindexDir, 'dist') + protected buildProtectedDeletionGuard(ctx: InputEffectContext): ReturnType { + return createProtectedDeletionGuard({ + workspaceDir: ctx.workspaceDir, + aindexDir: ctx.aindexDir, + rules: [ + ...collectConfiguredAindexInputRules(ctx.userConfigOptions, ctx.aindexDir, { + workspaceDir: ctx.workspaceDir + }), + ...(ctx.userConfigOptions.cleanupProtection?.rules ?? []).map(rule => ({ + path: rule.path, + protectionMode: rule.protectionMode, + reason: rule.reason ?? 'configured cleanup protection rule', + source: 'configured-cleanup-protection', + matcher: rule.matcher ?? 
'path' + })) + ] + }) + } - const deletedFiles: string[] = [] - const deletedDirs: string[] = [] + protected buildDeletionPlan( + ctx: InputEffectContext, + distDir: string, + srcPaths: OrphanCleanupSourcePaths + ): OrphanCleanupPlan { + const filesToDelete: string[] = [] + const dirsToDelete: string[] = [] const errors: {path: string, error: Error}[] = [] + for (const subDir of OrphanCleanupDistSubDirs) { + const distSubDirPath = ctx.path.join(distDir, subDir) + if (!ctx.fs.existsSync(distSubDirPath)) continue + if (!ctx.fs.statSync(distSubDirPath).isDirectory()) continue + this.collectDirectoryPlan(ctx, distSubDirPath, subDir, srcPaths[subDir], filesToDelete, dirsToDelete, errors) + } + + return {filesToDelete, dirsToDelete, errors} + } + + private async cleanupOrphanFiles(ctx: InputEffectContext): Promise { + const {fs, path, aindexDir, logger, userConfigOptions, dryRun} = ctx + const distDir = path.join(aindexDir, 'dist') + if (!fs.existsSync(distDir)) { logger.debug({action: 'orphan-cleanup', message: 'dist/ directory does not exist, skipping', distDir}) return { success: true, description: 'dist/ directory does not exist, nothing to clean', - deletedFiles, - deletedDirs + deletedFiles: [], + deletedDirs: [] } } const aindexConfig = userConfigOptions.aindex - const srcPaths: Record = { + const srcPaths: OrphanCleanupSourcePaths = { skills: aindexConfig?.skills?.src ?? 'skills', commands: aindexConfig?.commands?.src ?? 'commands', agents: aindexConfig?.subAgents?.src ?? 'subagents', app: aindexConfig?.app?.src ?? 
'app' } - const distSubDirs = ['skills', 'commands', 'agents', 'app'] + const plan = this.buildDeletionPlan(ctx, distDir, srcPaths) + if (plan.errors.length > 0) logger.warn({action: 'orphan-cleanup', errors: plan.errors.map(error => ({path: error.path, error: error.error.message}))}) + + const guard = this.buildProtectedDeletionGuard(ctx) + const filePartition = partitionDeletionTargets(plan.filesToDelete, guard) + const dirPartition = partitionDeletionTargets(plan.dirsToDelete, guard) + const violations = [...filePartition.violations, ...dirPartition.violations].sort((a, b) => a.targetPath.localeCompare(b.targetPath)) + + if (violations.length > 0) { + return { + success: false, + description: `Protected deletion guard blocked orphan cleanup for ${violations.length} path(s)`, + deletedFiles: [], + deletedDirs: [], + error: new ProtectedDeletionGuardError('orphan-file-cleanup', violations) + } + } + + if (dryRun) { + return { + success: true, + description: `Would delete ${filePartition.safePaths.length} files and ${dirPartition.safePaths.length} directories`, + deletedFiles: [...filePartition.safePaths], + deletedDirs: [...dirPartition.safePaths].sort((a, b) => b.length - a.length) + } + } + + const deletedFiles: string[] = [] + const deletedDirs: string[] = [] + const deleteErrors: {path: string, error: Error}[] = [...plan.errors] - for (const subDir of distSubDirs) { - const distSubDirPath = path.join(distDir, subDir) - if (fs.existsSync(distSubDirPath)) this.cleanupDirectory(ctx, distSubDirPath, subDir, srcPaths[subDir]!, deletedFiles, deletedDirs, errors, dryRun ?? 
false) + for (const filePath of filePartition.safePaths) { + try { + fs.unlinkSync(filePath) + deletedFiles.push(filePath) + logger.debug({action: 'orphan-cleanup', deleted: filePath}) + } + catch (error) { + deleteErrors.push({path: filePath, error: error as Error}) + logger.warn({action: 'orphan-cleanup', message: 'Failed to delete file', path: filePath, error: (error as Error).message}) + } } - const hasErrors = errors.length > 0 - if (hasErrors) logger.warn({action: 'orphan-cleanup', errors: errors.map(e => ({path: e.path, error: e.error.message}))}) + for (const dirPath of [...dirPartition.safePaths].sort((a, b) => b.length - a.length)) { + try { + fs.rmdirSync(dirPath) + deletedDirs.push(dirPath) + logger.debug({action: 'orphan-cleanup', deletedDir: dirPath}) + } + catch (error) { + deleteErrors.push({path: dirPath, error: error as Error}) + logger.warn({action: 'orphan-cleanup', message: 'Failed to delete directory', path: dirPath, error: (error as Error).message}) + } + } + const hasErrors = deleteErrors.length > 0 return { success: !hasErrors, - description: dryRun - ? 
`Would delete ${deletedFiles.length} files and ${deletedDirs.length} directories` - : `Deleted ${deletedFiles.length} files and ${deletedDirs.length} directories`, + description: `Deleted ${deletedFiles.length} files and ${deletedDirs.length} directories`, deletedFiles, deletedDirs, - ...hasErrors && {error: new Error(`${errors.length} errors occurred during cleanup`)} + ...hasErrors && {error: new Error(`${deleteErrors.length} errors occurred during cleanup`)} } } - private cleanupDirectory( + protected collectDirectoryPlan( ctx: InputEffectContext, distDirPath: string, dirType: string, srcPath: string, - deletedFiles: string[], - deletedDirs: string[], - errors: {path: string, error: Error}[], - dryRun: boolean - ): void { + filesToDelete: string[], + dirsToDelete: string[], + errors: {path: string, error: Error}[] + ): boolean { const {fs, path, aindexDir, logger} = ctx let entries: import('node:fs').Dirent[] @@ -79,36 +173,32 @@ export class OrphanFileCleanupEffectInputPlugin extends AbstractInputPlugin { catch (error) { errors.push({path: distDirPath, error: error as Error}) logger.warn({action: 'orphan-cleanup', message: 'Failed to read directory', path: distDirPath, error: (error as Error).message}) - return + return false } + let hasRetainedEntries = false + for (const entry of entries) { const entryPath = path.join(distDirPath, entry.name) if (entry.isDirectory()) { - this.cleanupDirectory(ctx, entryPath, dirType, srcPath, deletedFiles, deletedDirs, errors, dryRun) - this.removeEmptyDirectory(ctx, entryPath, deletedDirs, errors, dryRun) - } else if (entry.isFile()) { - const isOrphan = this.isOrphanFile(ctx, entryPath, dirType, srcPath, aindexDir) - - if (isOrphan) { - if (dryRun) { - logger.debug({action: 'orphan-cleanup', dryRun: true, wouldDelete: entryPath}) - deletedFiles.push(entryPath) - } else { - try { - fs.unlinkSync(entryPath) - deletedFiles.push(entryPath) - logger.debug({action: 'orphan-cleanup', deleted: entryPath}) - } - catch (error) { - 
errors.push({path: entryPath, error: error as Error}) - logger.warn({action: 'orphan-cleanup', message: 'Failed to delete file', path: entryPath, error: (error as Error).message}) - } - } - } + const childWillBeEmpty = this.collectDirectoryPlan(ctx, entryPath, dirType, srcPath, filesToDelete, dirsToDelete, errors) + if (childWillBeEmpty) dirsToDelete.push(entryPath) + else hasRetainedEntries = true + continue + } + + if (!entry.isFile()) { + hasRetainedEntries = true + continue } + + const isOrphan = this.isOrphanFile(ctx, entryPath, dirType, srcPath, aindexDir) + if (isOrphan) filesToDelete.push(entryPath) + else hasRetainedEntries = true } + + return !hasRetainedEntries } private isOrphanFile( @@ -128,13 +218,10 @@ export class OrphanFileCleanupEffectInputPlugin extends AbstractInputPlugin { const relativeDir = path.dirname(relativeFromType) const baseName = fileName.replace(/\.mdx$/, '') - if (isMdxFile) { - const possibleSrcPaths = this.getPossibleSourcePaths(path, aindexDir, dirType, srcPath, baseName, relativeDir) - return !possibleSrcPaths.some(srcPath => fs.existsSync(srcPath)) - } - const possibleSrcPaths: string[] = [] - possibleSrcPaths.push(path.join(aindexDir, srcPath, relativeFromType)) - return !possibleSrcPaths.some(srcPath => fs.existsSync(srcPath)) + if (!isMdxFile) return !fs.existsSync(path.join(aindexDir, srcPath, relativeFromType)) + + const possibleSrcPaths = this.getPossibleSourcePaths(path, aindexDir, dirType, srcPath, baseName, relativeDir) + return !possibleSrcPaths.some(candidatePath => fs.existsSync(candidatePath)) } private getPossibleSourcePaths( @@ -151,57 +238,26 @@ export class OrphanFileCleanupEffectInputPlugin extends AbstractInputPlugin { const skillName = skillParts[0] ?? baseName const remainingPath = relativeDir === '.' ? 
'' : relativeDir.slice(skillName.length + 1) - if (remainingPath !== '') return [nodePath.join(aindexDir, srcPath, skillName, remainingPath, `${baseName}.cn.mdx`)] + if (remainingPath !== '') { + return SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, skillName, remainingPath, `${baseName}${extension}`)) + } + return [ - nodePath.join(aindexDir, srcPath, skillName, 'SKILL.cn.mdx'), - nodePath.join(aindexDir, srcPath, skillName, 'skill.cn.mdx') + ...SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, skillName, `SKILL${extension}`)), + ...SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, skillName, `skill${extension}`)) ] } case 'commands': - return relativeDir === '.' - ? [nodePath.join(aindexDir, srcPath, `${baseName}.cn.mdx`)] - : [nodePath.join(aindexDir, srcPath, relativeDir, `${baseName}.cn.mdx`)] case 'agents': - return relativeDir === '.' - ? [nodePath.join(aindexDir, srcPath, `${baseName}.cn.mdx`)] - : [nodePath.join(aindexDir, srcPath, relativeDir, `${baseName}.cn.mdx`)] case 'app': return relativeDir === '.' - ? [nodePath.join(aindexDir, srcPath, `${baseName}.cn.mdx`)] - : [nodePath.join(aindexDir, srcPath, relativeDir, `${baseName}.cn.mdx`)] + ? 
SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, `${baseName}${extension}`)) + : SourcePromptFileExtensions.map(extension => nodePath.join(aindexDir, srcPath, relativeDir, `${baseName}${extension}`)) default: return [] } } - private removeEmptyDirectory( - ctx: InputEffectContext, - dirPath: string, - deletedDirs: string[], - errors: {path: string, error: Error}[], - dryRun: boolean - ): void { - const {fs, logger} = ctx - - try { - const entries = fs.readdirSync(dirPath) - if (entries.length === 0) { - if (dryRun) { - logger.debug({action: 'orphan-cleanup', dryRun: true, wouldDeleteDir: dirPath}) - deletedDirs.push(dirPath) - } else { - fs.rmdirSync(dirPath) - deletedDirs.push(dirPath) - logger.debug({action: 'orphan-cleanup', deletedDir: dirPath}) - } - } - } - catch (error) { - errors.push({path: dirPath, error: error as Error}) - logger.warn({action: 'orphan-cleanup', message: 'Failed to check/remove directory', path: dirPath, error: (error as Error).message}) - } - } - - collect(_ctx: InputPluginContext): Partial { + collect(_ctx: InputPluginContext): Partial { return {} } } diff --git a/cli/src/inputs/effect-skill-sync.test.ts b/cli/src/inputs/effect-skill-sync.test.ts new file mode 100644 index 00000000..41748f50 --- /dev/null +++ b/cli/src/inputs/effect-skill-sync.test.ts @@ -0,0 +1,54 @@ +import type {InputPluginContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import {SkillNonSrcFileSyncEffectInputPlugin} from './effect-skill-sync' + +const legacySourceExtension = '.cn.mdx' + +function createContext(tempWorkspace: string): InputPluginContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger: 
createLogger('SkillNonSrcFileSyncEffectInputPluginTest', 'error'), + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputPluginContext +} + +describe('skill non-src file sync effect', () => { + it('skips .src.mdx files while copying non-source skill assets', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-sync-test-')) + const srcSkillDir = path.join(tempWorkspace, 'aindex', 'src', 'skills', 'demo') + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + + try { + fs.mkdirSync(srcSkillDir, {recursive: true}) + fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: demo\n---\nDemo skill', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'guide.src.mdx'), 'Guide child doc', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, `legacy${legacySourceExtension}`), 'Legacy child doc', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'guide.mdx'), 'English child doc', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'notes.md'), 'Skill notes', 'utf8') + + const plugin = new SkillNonSrcFileSyncEffectInputPlugin() + const [result] = await plugin.executeEffects(createContext(tempWorkspace)) + + expect(result?.success).toBe(true) + expect(fs.existsSync(path.join(distSkillDir, 'notes.md'))).toBe(true) + expect(fs.existsSync(path.join(distSkillDir, 'skill.src.mdx'))).toBe(false) + expect(fs.existsSync(path.join(distSkillDir, 'guide.src.mdx'))).toBe(false) + expect(fs.existsSync(path.join(distSkillDir, `legacy${legacySourceExtension}`))).toBe(false) + expect(fs.existsSync(path.join(distSkillDir, 'guide.mdx'))).toBe(false) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/inputs/effect-skill-sync.ts b/cli/src/inputs/effect-skill-sync.ts index 017b0408..fb246e8f 100644 --- a/cli/src/inputs/effect-skill-sync.ts +++ b/cli/src/inputs/effect-skill-sync.ts @@ -1,8 +1,8 @@ import type {Buffer} from 
'node:buffer' -import type {CollectedInputContext, InputEffectContext, InputEffectResult, InputPluginContext} from '../plugins/plugin-shared' +import type {InputCollectedContext, InputEffectContext, InputEffectResult, InputPluginContext} from '../plugins/plugin-core' import {createHash} from 'node:crypto' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' +import {AbstractInputPlugin, hasSourcePromptExtension} from '../plugins/plugin-core' export interface SkillSyncEffectResult extends InputEffectResult { readonly copiedFiles: string[] @@ -111,7 +111,7 @@ export class SkillNonSrcFileSyncEffectInputPlugin extends AbstractInputPlugin { dryRun ) } else if (entry.isFile()) { - if (entry.name.endsWith('.cn.mdx')) continue + if (hasSourcePromptExtension(entry.name) || entry.name.endsWith('.mdx')) continue const targetDir = path.dirname(distPath) if (!fs.existsSync(targetDir)) { @@ -173,7 +173,7 @@ export class SkillNonSrcFileSyncEffectInputPlugin extends AbstractInputPlugin { return createHash('sha256').update(content).digest('hex') } - collect(_ctx: InputPluginContext): Partial { + collect(_ctx: InputPluginContext): Partial { return {} } } diff --git a/cli/src/inputs/input-agentskills-types.ts b/cli/src/inputs/input-agentskills-types.ts index 68c2499f..03f3f2f2 100644 --- a/cli/src/inputs/input-agentskills-types.ts +++ b/cli/src/inputs/input-agentskills-types.ts @@ -2,7 +2,7 @@ * Types for SkillInputPlugin resource processing */ -import type {SkillChildDoc, SkillResource} from '../plugins/plugin-shared' +import type {SkillChildDoc, SkillResource} from '../plugins/plugin-core' export interface ResourceScanResult { readonly childDocs: SkillChildDoc[] diff --git a/cli/src/inputs/input-agentskills.ts b/cli/src/inputs/input-agentskills.ts index 6bf480d3..56d9c1a5 100644 --- a/cli/src/inputs/input-agentskills.ts +++ b/cli/src/inputs/input-agentskills.ts @@ -1,10 +1,8 @@ import type {Dirent} from 'node:fs' import type { - CollectedInputContext, ILogger, + 
InputCollectedContext, InputPluginContext, - LocalizedPrompt, - LocalizedSkillPrompt, McpServerConfig, SkillChildDoc, SkillMcpConfig, @@ -12,7 +10,7 @@ import type { SkillResource, SkillResourceEncoding, SkillYAMLFrontMatter -} from '../plugins/plugin-shared' +} from '../plugins/plugin-core' import type {ResourceScanResult} from './input-agentskills-types' import {Buffer} from 'node:buffer' @@ -20,8 +18,7 @@ import * as nodePath from 'node:path' import {mdxToMd} from '@truenine/md-compiler' import {MetadataValidationError} from '@truenine/md-compiler/errors' import {parseMarkdown, transformMdxReferencesToMd} from '@truenine/md-compiler/markdown' -import {AbstractInputPlugin, createLocalizedPromptReader} from '@truenine/plugin-input-shared' -import {FilePathKind, PromptKind, validateSkillMetadata} from '../plugins/plugin-shared' +import {AbstractInputPlugin, createLocalizedPromptReader, FilePathKind, PromptKind, SourceLocaleExtensions, validateSkillMetadata} from '../plugins/plugin-core' export * from './input-agentskills-types' // Re-export from types file @@ -166,26 +163,6 @@ const SKILL_RESOURCE_BINARY_EXTENSIONS = new Set([ // Binary extensions '.odp' ]) -type ResourceCategory = 'image' | 'code' | 'data' | 'document' | 'config' | 'script' | 'binary' | 'other' - -const FILE_TYPE_CATEGORIES: Record = { - image: ['.png', '.jpg', '.jpeg', '.gif', '.webp', '.ico', '.bmp', '.tiff', '.svg'], - code: ['.kt', '.java', '.py', '.pyi', '.pyx', '.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs', '.go', '.rs', '.c', '.cpp', '.cc', '.h', '.hpp', '.hxx', '.cs', '.fs', '.fsx', '.vb', '.rb', '.php', '.swift', '.scala', '.groovy', '.lua', '.r', '.jl', '.ex', '.exs', '.erl', '.clj', '.cljs', '.hs', '.ml', '.mli', '.nim', '.zig', '.v', '.dart', '.vue', '.svelte', '.d.ts', '.d.mts', '.d.cts'], - data: ['.sql', '.json', '.jsonc', '.json5', '.xml', '.xsd', '.xsl', '.xslt', '.yaml', '.yml', '.toml', '.csv', '.tsv', '.graphql', '.gql', '.proto'], - document: ['.txt', '.text', '.rtf', '.log', 
'.docx', '.doc', '.xlsx', '.xls', '.pptx', '.ppt', '.pdf', '.odt', '.ods', '.odp'], - config: ['.ini', '.conf', '.cfg', '.config', '.properties', '.env', '.envrc', '.editorconfig', '.gitignore', '.gitattributes', '.npmrc', '.nvmrc', '.npmignore', '.eslintrc', '.prettierrc', '.stylelintrc', '.babelrc', '.browserslistrc'], - script: ['.sh', '.bash', '.zsh', '.fish', '.ps1', '.psm1', '.psd1', '.bat', '.cmd'], - binary: ['.exe', '.dll', '.so', '.dylib', '.bin', '.wasm', '.class', '.jar', '.war', '.pyd', '.pyc', '.pyo', '.zip', '.tar', '.gz', '.bz2', '.7z', '.rar', '.ttf', '.otf', '.woff', '.woff2', '.eot', '.db', '.sqlite', '.sqlite3'] -} - -function getResourceCategory(ext: string): ResourceCategory { - const lowerExt = ext.toLowerCase() - for (const [category, extensions] of Object.entries(FILE_TYPE_CATEGORIES)) { - if (extensions.includes(lowerExt)) return category as ResourceCategory - } - return 'other' -} - function isBinaryResourceExtension(ext: string): boolean { return SKILL_RESOURCE_BINARY_EXTENSIONS.has(ext.toLowerCase()) } @@ -279,9 +256,9 @@ class ResourceProcessor { extension: ext, fileName, relativePath, + sourcePath: filePath, content, encoding, - category: getResourceCategory(ext), length, ...mimeType != null && {mimeType} } @@ -391,13 +368,15 @@ async function createSkillPrompt( mcpConfig?: SkillMcpConfig, childDocs: SkillPrompt['childDocs'] = [], resources: SkillPrompt['resources'] = [], - seriName?: string | string[] | null + seriName?: string | string[] | null, + compiledMetadata?: Record ): Promise { const {logger, globalScope, fs} = ctx const distFilePath = nodePath.join(skillAbsoluteDir, 'skill.mdx') let rawContent = content - let parsed: ReturnType> | undefined + let parsed: ReturnType> | undefined, + distMetadata: Record | undefined if (fs.existsSync(distFilePath)) { try { @@ -411,22 +390,23 @@ async function createSkillPrompt( }) content = transformMdxReferencesToMd(compileResult.content) + distMetadata = compileResult.metadata.fields } catch 
(e) { logger.warn('failed to recompile skill from dist', {skill: name, error: e}) } } - const exportMetadata = extractSkillMetadataFromExport(rawContent) // Extract metadata from JS export if YAML front matter is not present + const exportMetadata = distMetadata ?? compiledMetadata ?? extractSkillMetadataFromExport(rawContent) // Use metadata from dist file, or from compiled MDX, or extract from raw content - const finalDescription = parsed?.yamlFrontMatter?.description ?? exportMetadata.description + const finalDescription = parsed?.yamlFrontMatter?.description ?? exportMetadata?.description as string | undefined if (finalDescription == null || finalDescription.trim().length === 0) { // Strict validation: description must exist and not be empty logger.error('SKILL_VALIDATION_FAILED: description is required and cannot be empty', { skill: name, skillDir, yamlDescription: parsed?.yamlFrontMatter?.description, - exportDescription: exportMetadata.description, + exportDescription: exportMetadata?.description, hint: 'Add a non-empty description field to the SKILL.md front matter or export default' }) throw new Error(`Skill "${name}" validation failed: description is required and cannot be empty`) @@ -591,14 +571,14 @@ export class SkillInputPlugin extends AbstractInputPlugin { return processor.scanSkillDirectory(skillDir, currentRelativePath) } - async collect(ctx: InputPluginContext): Promise> { + async collect(ctx: InputPluginContext): Promise> { const {userConfigOptions: options, logger, fs, path: pathModule, globalScope} = ctx const {aindexDir} = this.resolveBasePaths(options) const srcSkillDir = this.resolveAindexPath(options.aindex.skills.src, aindexDir) const distSkillDir = this.resolveAindexPath(options.aindex.skills.dist, aindexDir) - const legacySkills: SkillPrompt[] = [] + const flatSkills: SkillPrompt[] = [] const reader = createLocalizedPromptReader(fs, pathModule, logger, globalScope) const {prompts: localizedSkills, errors} = await 
reader.readDirectoryStructure( @@ -607,13 +587,21 @@ export class SkillInputPlugin extends AbstractInputPlugin { { kind: PromptKind.Skill, entryFileName: 'skill', - localeExtensions: {zh: '.cn.mdx', en: '.mdx'}, + localeExtensions: SourceLocaleExtensions, isDirectoryStructure: true, - createPrompt: async (content, locale, name) => { + createPrompt: async (content, locale, name, metadata) => { const skillDistDir = pathModule.join(distSkillDir, name) - const processor = new ResourceProcessor({fs, logger, skillDir: skillDistDir}) - const {childDocs, resources} = processor.scanSkillDirectory(skillDistDir) - const mcpConfig = readMcpConfig(skillDistDir, fs, logger) + let childDocs: SkillChildDoc[] = [] + let resources: SkillResource[] = [] + let mcpConfig: SkillMcpConfig | undefined + + if (fs.existsSync(skillDistDir)) { + const processor = new ResourceProcessor({fs, logger, skillDir: skillDistDir}) + const {childDocs: scannedChildDocs, resources: scannedResources} = processor.scanSkillDirectory(skillDistDir) + childDocs = scannedChildDocs + resources = scannedResources + mcpConfig = readMcpConfig(skillDistDir, fs, logger) + } return createSkillPrompt( content, @@ -624,7 +612,9 @@ export class SkillInputPlugin extends AbstractInputPlugin { ctx, mcpConfig, childDocs, - resources + resources, + void 0, + metadata ) } } @@ -634,7 +624,7 @@ export class SkillInputPlugin extends AbstractInputPlugin { for (const localized of localizedSkills) { const prompt = localized.dist?.prompt ?? 
localized.src.default.prompt - if (prompt) legacySkills.push(prompt) + if (prompt) flatSkills.push(prompt) } if (fs.existsSync(distSkillDir)) { @@ -659,7 +649,7 @@ export class SkillInputPlugin extends AbstractInputPlugin { skillAbsoluteDir, ctx ) - if (skill) legacySkills.push(skill) + if (skill) flatSkills.push(skill) } catch (e) { logger.error('failed to parse skill', {file: skillFilePath, error: e}) @@ -667,19 +657,8 @@ export class SkillInputPlugin extends AbstractInputPlugin { } } - const promptIndex = new Map() - for (const skill of localizedSkills) promptIndex.set(skill.name, skill) - return { - prompts: { - skills: localizedSkills as LocalizedSkillPrompt[], - commands: [], - subAgents: [], - rules: [], - readme: [] - }, - promptIndex, - skills: legacySkills + skills: flatSkills } } } diff --git a/cli/src/inputs/input-aindex.ts b/cli/src/inputs/input-aindex.ts index a3d0383f..4c7af3c7 100644 --- a/cli/src/inputs/input-aindex.ts +++ b/cli/src/inputs/input-aindex.ts @@ -1,9 +1,7 @@ -import type {CollectedInputContext, InputPluginContext, Project, Workspace} from '../plugins/plugin-shared' -import type {ProjectConfig} from '../plugins/plugin-shared/types' +import type {InputCollectedContext, InputPluginContext, Project, ProjectConfig, Workspace} from '../plugins/plugin-core' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' import {parse as parseJsonc} from 'jsonc-parser' -import {FilePathKind} from '../plugins/plugin-shared' +import {AbstractInputPlugin, FilePathKind} from '../plugins/plugin-core' export class AindexInputPlugin extends AbstractInputPlugin { constructor() { @@ -35,7 +33,7 @@ export class AindexInputPlugin extends AbstractInputPlugin { } } - collect(ctx: InputPluginContext): Partial { + collect(ctx: InputPluginContext): Partial { const {userConfigOptions: options, logger, fs, path} = ctx const {workspaceDir, aindexDir} = this.resolveBasePaths(options) diff --git a/cli/src/inputs/input-command.test.ts 
b/cli/src/inputs/input-command.test.ts new file mode 100644 index 00000000..8fa9d1bb --- /dev/null +++ b/cli/src/inputs/input-command.test.ts @@ -0,0 +1,108 @@ +import type {InputPluginContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import {CommandInputPlugin} from './input-command' + +const legacySourceExtension = '.cn.mdx' + +function createContext(tempWorkspace: string): InputPluginContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger: createLogger('CommandInputPluginTest', 'error'), + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputPluginContext +} + +describe('command input plugin', () => { + it('prefers dist content, and dist also runs mdx-to-md compilation', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'commands') + const distDir = path.join(aindexDir, 'dist', 'commands') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + + const srcFile = path.join(srcDir, 'demo.src.mdx') + const distFile = path.join(distDir, 'demo.mdx') + const srcContent = '---\ndescription: src\n---\nCommand source' + const distContent = '---\ndescription: dist\n---\nexport const x = 1\n\nCommand dist' + fs.writeFileSync(srcFile, srcContent, 'utf8') + fs.writeFileSync(distFile, distContent, 'utf8') + + const plugin = new CommandInputPlugin() + const result = await plugin.collect(createContext(tempWorkspace)) + const [command] = result.commands ?? [] + + expect(result.commands?.length ?? 
0).toBe(1) + expect(command?.commandName).toBe('demo') + expect(command?.content).toContain('Command dist') + expect(command?.content).not.toContain('Command source') + expect(command?.content).not.toContain('export const x = 1') + expect(command?.yamlFrontMatter?.description).toBe('dist') + expect(command?.rawMdxContent).toContain('export const x = 1') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('loads commands from dist when the source tree is missing', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-dist-only-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const distDir = path.join(aindexDir, 'dist', 'commands') + + try { + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync( + path.join(distDir, 'demo.mdx'), + '---\ndescription: dist only\n---\nDist only command', + 'utf8' + ) + + const plugin = new CommandInputPlugin() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.commands?.length ?? 0).toBe(1) + expect(result.commands?.[0]?.commandName).toBe('demo') + expect(result.commands?.[0]?.content).toContain('Dist only command') + expect(result.commands?.[0]?.yamlFrontMatter?.description).toBe('dist only') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('ignores legacy cn command sources', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-command-legacy-test-')) + const srcDir = path.join(tempWorkspace, 'aindex', 'commands') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.writeFileSync( + path.join(srcDir, `demo${legacySourceExtension}`), + '---\ndescription: legacy\n---\nLegacy command', + 'utf8' + ) + + const plugin = new CommandInputPlugin() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.commands ?? 
[]).toHaveLength(0) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/inputs/input-command.ts b/cli/src/inputs/input-command.ts index 19f31428..a232e42e 100644 --- a/cli/src/inputs/input-command.ts +++ b/cli/src/inputs/input-command.ts @@ -1,43 +1,31 @@ import type { - CollectedInputContext, CommandPrompt, + CommandYAMLFrontMatter, + InputCollectedContext, InputPluginContext, - Locale, - LocalizedCommandPrompt, - PluginOptions, - ResolvedBasePaths -} from '../plugins/plugin-shared' + Locale +} from '../plugins/plugin-core' import { AbstractInputPlugin, - createLocalizedPromptReader -} from '@truenine/plugin-input-shared' -import { + createLocalizedPromptReader, FilePathKind, - PromptKind -} from '../plugins/plugin-shared' + PromptKind, + SourceLocaleExtensions -export interface CommandPrefixInfo { - readonly commandPrefix?: string - readonly commandName: string -} +} from '../plugins/plugin-core' export class CommandInputPlugin extends AbstractInputPlugin { constructor() { super('CommandInputPlugin') } - private getDistDir(options: Required, resolvedPaths: ResolvedBasePaths): string { - return this.resolveAindexPath(options.aindex.commands.dist, resolvedPaths.aindexDir) - } - private createCommandPrompt( content: string, _locale: Locale, name: string, - _srcDir: string, distDir: string, ctx: InputPluginContext, - _rawContent?: string + metadata?: Record ): CommandPrompt { const {path} = ctx @@ -46,12 +34,18 @@ export class CommandInputPlugin extends AbstractInputPlugin { const parentDirName = slashIndex !== -1 ? normalizedName.slice(0, slashIndex) : void 0 const fileName = slashIndex !== -1 ? normalizedName.slice(slashIndex + 1) : normalizedName - const prefixInfo = this.extractPrefixInfo(fileName, parentDirName) + const baseName = fileName.replace(/\.mdx$/, '') + const underscoreIndex = baseName.indexOf('_') + const commandPrefix = parentDirName ?? (underscoreIndex === -1 ? 
void 0 : baseName.slice(0, Math.max(0, underscoreIndex))) + const commandName = parentDirName != null || underscoreIndex === -1 + ? baseName + : baseName.slice(Math.max(0, underscoreIndex + 1)) const filePath = path.join(distDir, `${name}.mdx`) const entryName = `${name}.mdx` + const yamlFrontMatter = metadata as CommandYAMLFrontMatter | undefined - return { + const prompt: CommandPrompt = { type: PromptKind.Command, content, length: content.length, @@ -63,37 +57,24 @@ export class CommandInputPlugin extends AbstractInputPlugin { getDirectoryName: () => entryName.replace(/\.mdx$/, ''), getAbsolutePath: () => filePath }, - ...prefixInfo.commandPrefix != null && {commandPrefix: prefixInfo.commandPrefix}, - commandName: prefixInfo.commandName + ...commandPrefix != null && {commandPrefix}, + commandName } as CommandPrompt - } - - extractPrefixInfo(fileName: string, parentDirName?: string): CommandPrefixInfo { - const baseName = fileName.replace(/\.mdx$/, '') - - if (parentDirName != null) { - return { - commandPrefix: parentDirName, - commandName: baseName - } - } - - const underscoreIndex = baseName.indexOf('_') - if (underscoreIndex === -1) return {commandName: baseName} + if (yamlFrontMatter == null) return prompt - return { - commandPrefix: baseName.slice(0, Math.max(0, underscoreIndex)), - commandName: baseName.slice(Math.max(0, underscoreIndex + 1)) - } + Object.assign(prompt, {yamlFrontMatter}) + if (yamlFrontMatter.seriName != null) Object.assign(prompt, {seriName: yamlFrontMatter.seriName}) + if (yamlFrontMatter.scope === 'global') Object.assign(prompt, {globalOnly: true}) + return prompt } - override async collect(ctx: InputPluginContext): Promise> { + override async collect(ctx: InputPluginContext): Promise> { const {userConfigOptions: options, logger, path, fs, globalScope} = ctx const resolvedPaths = this.resolveBasePaths(options) const srcDir = this.resolveAindexPath(options.aindex.commands.src, resolvedPaths.aindexDir) - const distDir = 
this.getDistDir(options, resolvedPaths) + const distDir = this.resolveAindexPath(options.aindex.commands.dist, resolvedPaths.aindexDir) logger.debug('CommandInputPlugin collecting', { srcDir, @@ -108,15 +89,15 @@ export class CommandInputPlugin extends AbstractInputPlugin { distDir, { kind: PromptKind.Command, - localeExtensions: {zh: '.cn.mdx', en: '.mdx'}, + localeExtensions: SourceLocaleExtensions, isDirectoryStructure: false, - createPrompt: async (content, locale, name) => this.createCommandPrompt( + createPrompt: (content, locale, name, metadata) => this.createCommandPrompt( content, locale, name, - srcDir, distDir, - ctx + ctx, + metadata ) } ) @@ -128,30 +109,33 @@ export class CommandInputPlugin extends AbstractInputPlugin { for (const error of errors) logger.warn('Failed to read command', {path: error.path, phase: error.phase, error: error.error}) - const legacyCommands: CommandPrompt[] = [] + const flatCommands: CommandPrompt[] = [] for (const localized of localizedCommands) { - const prompt = localized.dist?.prompt ?? localized.src.default.prompt - if (prompt) legacyCommands.push(prompt) + const distContent = localized.dist + if (distContent?.prompt != null) { + const {prompt: distPrompt, rawMdx} = distContent + flatCommands.push(rawMdx == null + ? distPrompt + : {...distPrompt, rawMdxContent: rawMdx}) + continue + } + + const srcPrompt = localized.src.default.prompt + if (srcPrompt != null) { + const {rawMdx} = localized.src.default + flatCommands.push(rawMdx == null + ? 
srcPrompt + : {...srcPrompt, rawMdxContent: rawMdx}) + } } - logger.debug('CommandInputPlugin legacy commands', { - count: legacyCommands.length, - commands: legacyCommands.map(c => c.commandName) + logger.debug('CommandInputPlugin flattened commands', { + count: flatCommands.length, + commands: flatCommands.map(c => c.commandName) }) - const promptIndex = new Map() - for (const cmd of localizedCommands) promptIndex.set(cmd.name, cmd) - return { - prompts: { - skills: [], - commands: localizedCommands, - subAgents: [], - rules: [], - readme: [] - }, - promptIndex, - commands: legacyCommands + commands: flatCommands } } } diff --git a/cli/src/inputs/input-editorconfig.ts b/cli/src/inputs/input-editorconfig.ts index ce8f6517..27dcee95 100644 --- a/cli/src/inputs/input-editorconfig.ts +++ b/cli/src/inputs/input-editorconfig.ts @@ -1,42 +1,21 @@ -import type {CollectedInputContext, InputPluginContext, ProjectIDEConfigFile} from '../plugins/plugin-shared' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' -import {FilePathKind, IDEKind} from '../plugins/plugin-shared' - -function readIdeConfigFile( - type: T, - relativePath: string, - aindexDir: string, - fs: typeof import('node:fs'), - path: typeof import('node:path') -): ProjectIDEConfigFile | undefined { - const absPath = path.join(aindexDir, relativePath) - if (!(fs.existsSync(absPath) && fs.statSync(absPath).isFile())) return void 0 - - const content = fs.readFileSync(absPath, 'utf8') - return { - type, - content, - length: content.length, - filePathKind: FilePathKind.Absolute, - dir: { - pathKind: FilePathKind.Absolute, - path: absPath, - getDirectoryName: () => path.basename(absPath) - } - } -} +import type {InputCollectedContext, InputPluginContext, ProjectIDEConfigFile} from '../plugins/plugin-core' +import {AbstractInputPlugin, IDEKind} from '../plugins/plugin-core' +import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' export class EditorConfigInputPlugin extends 
AbstractInputPlugin { constructor() { super('EditorConfigInputPlugin') } - collect(ctx: InputPluginContext): Partial { - const {userConfigOptions, fs, path} = ctx - const {aindexDir} = this.resolveBasePaths(userConfigOptions) + collect(ctx: InputPluginContext): Partial { + const {userConfigOptions, fs} = ctx + const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) const editorConfigFiles: ProjectIDEConfigFile[] = [] - const file = readIdeConfigFile(IDEKind.EditorConfig, '.editorconfig', aindexDir, fs, path) + const file = readPublicIdeConfigDefinitionFile(IDEKind.EditorConfig, '.editorconfig', aindexDir, fs, { + command: ctx.runtimeCommand, + workspaceDir + }) if (file != null) editorConfigFiles.push(file) return {editorConfigFiles} diff --git a/cli/src/inputs/input-git-exclude.ts b/cli/src/inputs/input-git-exclude.ts index 6b7af6c8..230bf88c 100644 --- a/cli/src/inputs/input-git-exclude.ts +++ b/cli/src/inputs/input-git-exclude.ts @@ -1,15 +1,18 @@ -import type {CollectedInputContext, InputPluginContext} from '../plugins/plugin-shared' -import * as path from 'node:path' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' +import type {InputCollectedContext, InputPluginContext} from '../plugins/plugin-core' +import {AbstractInputPlugin} from '../plugins/plugin-core' +import {PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, resolvePublicDefinitionPath} from '../public-config-paths' export class GitExcludeInputPlugin extends AbstractInputPlugin { constructor() { super('GitExcludeInputPlugin') } - collect(ctx: InputPluginContext): Partial { - const {aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) - const filePath = path.join(aindexDir, 'public', 'exclude') + collect(ctx: InputPluginContext): Partial { + const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) + const filePath = resolvePublicDefinitionPath(aindexDir, PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, { + command: ctx.runtimeCommand, + workspaceDir + }) 
if (!ctx.fs.existsSync(filePath)) { this.log.debug({action: 'collect', message: 'File not found', path: filePath}) diff --git a/cli/src/inputs/input-gitignore.ts b/cli/src/inputs/input-gitignore.ts index 46708045..e57d9de8 100644 --- a/cli/src/inputs/input-gitignore.ts +++ b/cli/src/inputs/input-gitignore.ts @@ -1,15 +1,18 @@ -import type {CollectedInputContext, InputPluginContext} from '../plugins/plugin-shared' -import * as path from 'node:path' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' +import type {InputCollectedContext, InputPluginContext} from '../plugins/plugin-core' +import {AbstractInputPlugin} from '../plugins/plugin-core' +import {PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, resolvePublicDefinitionPath} from '../public-config-paths' export class GitIgnoreInputPlugin extends AbstractInputPlugin { constructor() { super('GitIgnoreInputPlugin') } - collect(ctx: InputPluginContext): Partial { - const {aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) - const filePath = path.join(aindexDir, 'public', 'gitignore') + collect(ctx: InputPluginContext): Partial { + const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) + const filePath = resolvePublicDefinitionPath(aindexDir, PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, { + command: ctx.runtimeCommand, + workspaceDir + }) if (!ctx.fs.existsSync(filePath)) { this.log.debug({action: 'collect', message: 'File not found', path: filePath}) diff --git a/cli/src/inputs/input-global-memory.ts b/cli/src/inputs/input-global-memory.ts index b0ae48fc..2750cc84 100644 --- a/cli/src/inputs/input-global-memory.ts +++ b/cli/src/inputs/input-global-memory.ts @@ -1,4 +1,4 @@ -import type {CollectedInputContext, InputPluginContext} from '../plugins/plugin-shared' +import type {InputCollectedContext, InputPluginContext} from '../plugins/plugin-core' import * as os from 'node:os' import process from 'node:process' @@ -6,19 +6,14 @@ import process from 'node:process' import {mdxToMd} from 
'@truenine/md-compiler' import {ScopeError} from '@truenine/md-compiler/errors' import {parseMarkdown} from '@truenine/md-compiler/markdown' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' -import { - FilePathKind, - GlobalConfigDirectoryType, - PromptKind -} from '../plugins/plugin-shared' +import {AbstractInputPlugin, FilePathKind, GlobalConfigDirectoryType, PromptKind} from '../plugins/plugin-core' export class GlobalMemoryInputPlugin extends AbstractInputPlugin { constructor() { super('GlobalMemoryInputPlugin') } - async collect(ctx: InputPluginContext): Promise> { + async collect(ctx: InputPluginContext): Promise> { const {userConfigOptions: options, fs, path, globalScope} = ctx const {aindexDir} = this.resolveBasePaths(options) diff --git a/cli/src/inputs/input-jetbrains-config.ts b/cli/src/inputs/input-jetbrains-config.ts index 38d601f1..eaad2794 100644 --- a/cli/src/inputs/input-jetbrains-config.ts +++ b/cli/src/inputs/input-jetbrains-config.ts @@ -1,39 +1,15 @@ -import type {CollectedInputContext, InputPluginContext, ProjectIDEConfigFile} from '../plugins/plugin-shared' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' -import {FilePathKind, IDEKind} from '../plugins/plugin-shared' - -function readIdeConfigFile( - type: T, - relativePath: string, - aindexDir: string, - fs: typeof import('node:fs'), - path: typeof import('node:path') -): ProjectIDEConfigFile | undefined { - const absPath = path.join(aindexDir, relativePath) - if (!(fs.existsSync(absPath) && fs.statSync(absPath).isFile())) return void 0 - - const content = fs.readFileSync(absPath, 'utf8') - return { - type, - content, - length: content.length, - filePathKind: FilePathKind.Absolute, - dir: { - pathKind: FilePathKind.Absolute, - path: absPath, - getDirectoryName: () => path.basename(absPath) - } - } -} +import type {InputCollectedContext, InputPluginContext, ProjectIDEConfigFile} from '../plugins/plugin-core' +import {AbstractInputPlugin, IDEKind} from 
'../plugins/plugin-core' +import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' export class JetBrainsConfigInputPlugin extends AbstractInputPlugin { constructor() { super('JetBrainsConfigInputPlugin') } - collect(ctx: InputPluginContext): Partial { - const {userConfigOptions, fs, path} = ctx - const {aindexDir} = this.resolveBasePaths(userConfigOptions) + collect(ctx: InputPluginContext): Partial { + const {userConfigOptions, fs} = ctx + const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) const files = [ '.idea/codeStyles/Project.xml', @@ -43,7 +19,10 @@ export class JetBrainsConfigInputPlugin extends AbstractInputPlugin { const jetbrainsConfigFiles: ProjectIDEConfigFile[] = [] for (const relativePath of files) { - const file = readIdeConfigFile(IDEKind.IntellijIDEA, relativePath, aindexDir, fs, path) + const file = readPublicIdeConfigDefinitionFile(IDEKind.IntellijIDEA, relativePath, aindexDir, fs, { + command: ctx.runtimeCommand, + workspaceDir + }) if (file != null) jetbrainsConfigFiles.push(file) } diff --git a/cli/src/inputs/input-project-prompt.ts b/cli/src/inputs/input-project-prompt.ts index 79167c60..2fa96d26 100644 --- a/cli/src/inputs/input-project-prompt.ts +++ b/cli/src/inputs/input-project-prompt.ts @@ -1,21 +1,17 @@ import type { - CollectedInputContext, + InputCollectedContext, InputPluginContext, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt, YAMLFrontMatter -} from '../plugins/plugin-shared' +} from '../plugins/plugin-core' import process from 'node:process' import {mdxToMd} from '@truenine/md-compiler' import {ScopeError} from '@truenine/md-compiler/errors' import {parseMarkdown} from '@truenine/md-compiler/markdown' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' -import { - FilePathKind, - PromptKind -} from '../plugins/plugin-shared' +import {AbstractInputPlugin, FilePathKind, PromptKind} from '../plugins/plugin-core' const PROJECT_MEMORY_FILE = 'agt.mdx' const 
SCAN_SKIP_DIRECTORIES: readonly string[] = ['node_modules', '.git'] as const @@ -25,7 +21,7 @@ export class ProjectPromptInputPlugin extends AbstractInputPlugin { super('ProjectPromptInputPlugin', ['AindexInputPlugin']) } - async collect(ctx: InputPluginContext): Promise> { + async collect(ctx: InputPluginContext): Promise> { const {dependencyContext, fs, userConfigOptions: options, path, globalScope} = ctx const {aindexDir} = this.resolveBasePaths(options) diff --git a/cli/src/inputs/input-public-config.test.ts b/cli/src/inputs/input-public-config.test.ts new file mode 100644 index 00000000..bd97b544 --- /dev/null +++ b/cli/src/inputs/input-public-config.test.ts @@ -0,0 +1,239 @@ +import type {InputPluginContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import { + AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS, + PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, + PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, + resolvePublicDefinitionPath +} from '../public-config-paths' +import {EditorConfigInputPlugin} from './input-editorconfig' +import {GitExcludeInputPlugin} from './input-git-exclude' +import {GitIgnoreInputPlugin} from './input-gitignore' +import {JetBrainsConfigInputPlugin} from './input-jetbrains-config' +import {AIAgentIgnoreInputPlugin} from './input-shared-ignore' +import {VSCodeConfigInputPlugin} from './input-vscode-config' + +interface TestContextOptions { + readonly aindexDir?: string + readonly runtimeCommand?: InputPluginContext['runtimeCommand'] +} + +function createContext(tempWorkspace: string, options?: TestContextOptions): InputPluginContext { + const mergedOptions = mergeConfig({ + workspaceDir: tempWorkspace, + ...options?.aindexDir != null + ? 
{ + aindex: { + dir: options.aindexDir + } + } + : {} + }) + + return { + logger: createLogger('PublicConfigInputPluginTest', 'error'), + fs, + path, + glob, + userConfigOptions: mergedOptions, + dependencyContext: {}, + ...options?.runtimeCommand != null ? {runtimeCommand: options.runtimeCommand} : {} + } as InputPluginContext +} + +function writePublicDefinition(tempWorkspace: string, targetRelativePath: string, content: string): string { + const filePath = resolvePublicDefinitionPath(path.join(tempWorkspace, 'aindex'), targetRelativePath) + fs.mkdirSync(path.dirname(filePath), {recursive: true}) + fs.writeFileSync(filePath, content, 'utf8') + return filePath +} + +function writePublicProxy(tempWorkspace: string, source: string): string { + return writePublicDefinition(tempWorkspace, 'proxy.ts', source) +} + +describe('public config input plugins', () => { + it('reads config definitions from target-relative public paths', () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-public-config-input-')) + + try { + const aindexDir = path.join(tempWorkspace, 'aindex') + const gitIgnorePath = writePublicDefinition(tempWorkspace, PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, 'dist/\n') + const gitExcludePath = writePublicDefinition(tempWorkspace, PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, '.idea/\n') + const editorConfigPath = writePublicDefinition(tempWorkspace, '.editorconfig', 'root = true\n') + writePublicDefinition(tempWorkspace, '.vscode/settings.json', '{"editor.tabSize": 2}\n') + writePublicDefinition(tempWorkspace, '.vscode/extensions.json', '{"recommendations":["foo.bar"]}\n') + writePublicDefinition(tempWorkspace, '.idea/.gitignore', '/workspace.xml\n') + writePublicDefinition(tempWorkspace, '.idea/codeStyles/Project.xml', '\n') + writePublicDefinition(tempWorkspace, '.idea/codeStyles/codeStyleConfig.xml', '\n') + + for (const fileName of AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS) writePublicDefinition(tempWorkspace, fileName, `${fileName}\n`) + + 
const ctx = createContext(tempWorkspace) + const gitIgnore = new GitIgnoreInputPlugin().collect(ctx) + const gitExclude = new GitExcludeInputPlugin().collect(ctx) + const editorConfig = new EditorConfigInputPlugin().collect(ctx) + const vscode = new VSCodeConfigInputPlugin().collect(ctx) + const jetbrains = new JetBrainsConfigInputPlugin().collect(ctx) + const ignoreFiles = new AIAgentIgnoreInputPlugin().collect(ctx) + + expect(gitIgnore.globalGitIgnore).toBe('dist/\n') + expect(gitExclude.shadowGitExclude).toBe('.idea/\n') + expect(editorConfig.editorConfigFiles?.[0]?.dir.path).toBe(editorConfigPath) + expect(vscode.vscodeConfigFiles?.map(file => file.dir.path)).toEqual([ + path.join(aindexDir, 'public', '.vscode', 'settings.json'), + path.join(aindexDir, 'public', '.vscode', 'extensions.json') + ]) + expect(jetbrains.jetbrainsConfigFiles?.map(file => file.dir.path)).toEqual([ + path.join(aindexDir, 'public', '.idea', 'codeStyles', 'Project.xml'), + path.join(aindexDir, 'public', '.idea', 'codeStyles', 'codeStyleConfig.xml'), + path.join(aindexDir, 'public', '.idea', '.gitignore') + ]) + expect(ignoreFiles.aiAgentIgnoreConfigFiles?.map(file => file.fileName)).toEqual([ + ...AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS + ]) + expect(ignoreFiles.aiAgentIgnoreConfigFiles?.map(file => file.sourcePath)).toEqual( + AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS.map(fileName => resolvePublicDefinitionPath(aindexDir, fileName)) + ) + expect(gitIgnorePath).toBe(path.join(aindexDir, 'public', '.gitignore')) + expect(gitExcludePath).toBe(path.join(aindexDir, 'public', '.git', 'info', 'exclude')) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('does not read legacy definition locations after the hard cut', () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-public-config-legacy-')) + + try { + const aindexDir = path.join(tempWorkspace, 'aindex') + fs.mkdirSync(path.join(aindexDir, 'public'), {recursive: true}) + 
fs.mkdirSync(path.join(aindexDir, '.vscode'), {recursive: true}) + fs.mkdirSync(path.join(aindexDir, '.idea', 'codeStyles'), {recursive: true}) + + fs.writeFileSync(path.join(aindexDir, 'public', 'gitignore'), 'legacy gitignore\n', 'utf8') + fs.writeFileSync(path.join(aindexDir, 'public', 'exclude'), 'legacy exclude\n', 'utf8') + fs.writeFileSync(path.join(aindexDir, '.editorconfig'), 'root = true\n', 'utf8') + fs.writeFileSync(path.join(aindexDir, '.vscode', 'settings.json'), '{}\n', 'utf8') + fs.writeFileSync(path.join(aindexDir, '.vscode', 'extensions.json'), '{}\n', 'utf8') + fs.writeFileSync(path.join(aindexDir, '.idea', '.gitignore'), '/workspace.xml\n', 'utf8') + fs.writeFileSync(path.join(aindexDir, '.idea', 'codeStyles', 'Project.xml'), '\n', 'utf8') + fs.writeFileSync(path.join(aindexDir, '.idea', 'codeStyles', 'codeStyleConfig.xml'), '\n', 'utf8') + fs.writeFileSync(path.join(aindexDir, '.cursorignore'), '.cursor/\n', 'utf8') + + const ctx = createContext(tempWorkspace) + + expect(new GitIgnoreInputPlugin().collect(ctx).globalGitIgnore).toBeUndefined() + expect(new GitExcludeInputPlugin().collect(ctx).shadowGitExclude).toBeUndefined() + expect(new EditorConfigInputPlugin().collect(ctx).editorConfigFiles ?? []).toHaveLength(0) + expect(new VSCodeConfigInputPlugin().collect(ctx).vscodeConfigFiles ?? []).toHaveLength(0) + expect(new JetBrainsConfigInputPlugin().collect(ctx).jetbrainsConfigFiles ?? []).toHaveLength(0) + expect(new AIAgentIgnoreInputPlugin().collect(ctx).aiAgentIgnoreConfigFiles ?? 
[]).toHaveLength(0) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('routes public definitions through public/proxy.ts transparently', () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-public-config-proxy-')) + + try { + const aindexDir = path.join(tempWorkspace, 'aindex') + writePublicProxy( + tempWorkspace, + [ + 'export default (logicalPath) => {', + ' const normalizedPath = logicalPath.replaceAll("\\\\", "/")', + ' if (normalizedPath.startsWith(".git/")) return normalizedPath.replace(/^\\.git\\//, "____.git/")', + ' if (normalizedPath === ".idea/.gitignore") return ".idea/.gitignore"', + ' if (normalizedPath.startsWith(".idea/")) return normalizedPath', + ' if (!normalizedPath.startsWith(".")) return normalizedPath', + ' return normalizedPath.replace(/^\\.([^/\\\\]+)/, "____$1")', + '}', + '' + ].join('\n') + ) + + const gitIgnorePath = writePublicDefinition(tempWorkspace, PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, 'dist/\n') + const gitExcludePath = writePublicDefinition(tempWorkspace, PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, '.idea/\n') + const editorConfigPath = writePublicDefinition(tempWorkspace, '.editorconfig', 'root = true\n') + const vscodeSettingsPath = writePublicDefinition(tempWorkspace, '.vscode/settings.json', '{"editor.tabSize": 2}\n') + const vscodeExtensionsPath = writePublicDefinition(tempWorkspace, '.vscode/extensions.json', '{"recommendations":["foo.bar"]}\n') + const ideaGitIgnorePath = writePublicDefinition(tempWorkspace, '.idea/.gitignore', '/workspace.xml\n') + const ideaProjectPath = writePublicDefinition(tempWorkspace, '.idea/codeStyles/Project.xml', '\n') + const ideaCodeStyleConfigPath = writePublicDefinition(tempWorkspace, '.idea/codeStyles/codeStyleConfig.xml', '\n') + + for (const fileName of AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS) writePublicDefinition(tempWorkspace, fileName, `${fileName}\n`) + + const ctx = createContext(tempWorkspace) + const gitIgnore = new 
GitIgnoreInputPlugin().collect(ctx) + const gitExclude = new GitExcludeInputPlugin().collect(ctx) + const editorConfig = new EditorConfigInputPlugin().collect(ctx) + const vscode = new VSCodeConfigInputPlugin().collect(ctx) + const jetbrains = new JetBrainsConfigInputPlugin().collect(ctx) + const ignoreFiles = new AIAgentIgnoreInputPlugin().collect(ctx) + + expect(gitIgnore.globalGitIgnore).toBe('dist/\n') + expect(gitExclude.shadowGitExclude).toBe('.idea/\n') + expect(editorConfig.editorConfigFiles?.[0]?.dir.path).toBe(editorConfigPath) + expect(vscode.vscodeConfigFiles?.map(file => file.dir.path)).toEqual([ + vscodeSettingsPath, + vscodeExtensionsPath + ]) + expect(jetbrains.jetbrainsConfigFiles?.map(file => file.dir.path)).toEqual([ + ideaProjectPath, + ideaCodeStyleConfigPath, + ideaGitIgnorePath + ]) + expect(ignoreFiles.aiAgentIgnoreConfigFiles?.map(file => file.sourcePath)).toEqual( + AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS.map(fileName => resolvePublicDefinitionPath(aindexDir, fileName)) + ) + expect(gitIgnorePath).toBe(path.join(aindexDir, 'public', '____gitignore')) + expect(gitExcludePath).toBe(path.join(aindexDir, 'public', '____.git', 'info', 'exclude')) + expect(editorConfigPath).toBe(path.join(aindexDir, 'public', '____editorconfig')) + expect(vscodeSettingsPath).toBe(path.join(aindexDir, 'public', '____vscode', 'settings.json')) + expect(vscodeExtensionsPath).toBe(path.join(aindexDir, 'public', '____vscode', 'extensions.json')) + expect(ideaGitIgnorePath).toBe(path.join(aindexDir, 'public', '.idea', '.gitignore')) + expect(ideaProjectPath).toBe(path.join(aindexDir, 'public', '.idea', 'codeStyles', 'Project.xml')) + expect(ideaCodeStyleConfigPath).toBe(path.join(aindexDir, 'public', '.idea', 'codeStyles', 'codeStyleConfig.xml')) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('passes the configured workspace root into public/proxy.ts', () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 
'tnmsc-public-config-nested-aindex-')) + + try { + const aindexDir = path.join(tempWorkspace, 'config', 'aindex') + const publicDir = path.join(aindexDir, 'public') + fs.mkdirSync(path.join(publicDir, 'expected'), {recursive: true}) + fs.writeFileSync(path.join(publicDir, 'proxy.ts'), [ + 'export default (_logicalPath, ctx) => {', + ` return ctx.workspaceDir === ${JSON.stringify(tempWorkspace)} && ctx.cwd === ${JSON.stringify(tempWorkspace)}`, + ' ? "expected/.gitignore"', + ' : "unexpected/.gitignore"', + '}', + '' + ].join('\n'), 'utf8') + fs.writeFileSync(path.join(publicDir, 'expected', '.gitignore'), 'dist/\n', 'utf8') + + const ctx = createContext(tempWorkspace, {aindexDir: 'config/aindex'}) + const gitIgnore = new GitIgnoreInputPlugin().collect(ctx) + + expect(gitIgnore.globalGitIgnore).toBe('dist/\n') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/inputs/input-readme.ts b/cli/src/inputs/input-readme.ts index b5c50d64..958fc8fe 100644 --- a/cli/src/inputs/input-readme.ts +++ b/cli/src/inputs/input-readme.ts @@ -1,11 +1,10 @@ -import type {CollectedInputContext, InputPluginContext, ReadmeFileKind, ReadmePrompt, RelativePath} from '../plugins/plugin-shared' +import type {InputCollectedContext, InputPluginContext, ReadmeFileKind, ReadmePrompt, RelativePath} from '../plugins/plugin-core' import process from 'node:process' import {mdxToMd} from '@truenine/md-compiler' import {ScopeError} from '@truenine/md-compiler/errors' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' -import {FilePathKind, PromptKind, README_FILE_KIND_MAP} from '../plugins/plugin-shared' +import {AbstractInputPlugin, FilePathKind, PromptKind, README_FILE_KIND_MAP} from '../plugins/plugin-core' const ALL_FILE_KINDS = Object.entries(README_FILE_KIND_MAP) as [ReadmeFileKind, {src: string, out: string}][] @@ -14,7 +13,7 @@ export class ReadmeMdInputPlugin extends AbstractInputPlugin { super('ReadmeMdInputPlugin', 
['AindexInputPlugin']) } - async collect(ctx: InputPluginContext): Promise> { + async collect(ctx: InputPluginContext): Promise> { const {userConfigOptions: options, logger, fs, path, globalScope} = ctx const {workspaceDir, aindexDir} = this.resolveBasePaths(options) diff --git a/cli/src/inputs/input-rule.test.ts b/cli/src/inputs/input-rule.test.ts new file mode 100644 index 00000000..4612c4bc --- /dev/null +++ b/cli/src/inputs/input-rule.test.ts @@ -0,0 +1,77 @@ +import type {InputPluginContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import {RuleInputPlugin} from './input-rule' + +function createContext(tempWorkspace: string): InputPluginContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger: createLogger('RuleInputPluginTest', 'error'), + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputPluginContext +} + +describe('rule input plugin', () => { + it('loads rules from .src.mdx source files', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-rule-src-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'rules', 'qa') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.writeFileSync( + path.join(srcDir, 'boot.src.mdx'), + '---\ndescription: source only\n---\nSource only rule', + 'utf8' + ) + + const plugin = new RuleInputPlugin() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.rules?.length ?? 
0).toBe(1) + expect(result.rules?.[0]?.ruleName).toBe('boot') + expect(result.rules?.[0]?.content).toContain('Source only rule') + expect(result.rules?.[0]?.scope).toBe('project') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('loads rules from dist when the source tree is missing', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-rule-dist-only-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const distDir = path.join(aindexDir, 'dist', 'rules', 'qa') + + try { + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync( + path.join(distDir, 'boot.mdx'), + '---\nscope: global\nglobs:\n - "**/*.ts"\n---\nDist only rule', + 'utf8' + ) + + const plugin = new RuleInputPlugin() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.rules?.length ?? 0).toBe(1) + expect(result.rules?.[0]?.ruleName).toBe('boot') + expect(result.rules?.[0]?.content).toContain('Dist only rule') + expect(result.rules?.[0]?.scope).toBe('global') + expect(result.rules?.[0]?.globs).toEqual(['**/*.ts']) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/inputs/input-rule.ts b/cli/src/inputs/input-rule.ts index ce51e374..8096bb8f 100644 --- a/cli/src/inputs/input-rule.ts +++ b/cli/src/inputs/input-rule.ts @@ -1,42 +1,30 @@ import type { - CollectedInputContext, + InputCollectedContext, InputPluginContext, - LocalizedRulePrompt, - PluginOptions, - ResolvedBasePaths, RulePrompt, RuleScope -} from '../plugins/plugin-shared' +} from '../plugins/plugin-core' import {mdxToMd} from '@truenine/md-compiler' -import {parseMarkdown} from '@truenine/md-compiler/markdown' import { AbstractInputPlugin, - createLocalizedPromptReader -} from '@truenine/plugin-input-shared' -import { + createLocalizedPromptReader, FilePathKind, - PromptKind -} from '../plugins/plugin-shared' + PromptKind, + SourceLocaleExtensions + +} from 
'../plugins/plugin-core' export class RuleInputPlugin extends AbstractInputPlugin { constructor() { super('RuleInputPlugin') } - private getDistDir(options: Required, resolvedPaths: ResolvedBasePaths): string { - return this.resolveAindexPath(options.aindex.rules.dist, resolvedPaths.aindexDir) - } - - private getSrcDir(options: Required, resolvedPaths: ResolvedBasePaths): string { - return this.resolveAindexPath(options.aindex.rules.src, resolvedPaths.aindexDir) - } - - override async collect(ctx: InputPluginContext): Promise> { + override async collect(ctx: InputPluginContext): Promise> { const {userConfigOptions: options, logger, path, fs, globalScope} = ctx const resolvedPaths = this.resolveBasePaths(options) - const srcDir = this.getSrcDir(options, resolvedPaths) - const distDir = this.getDistDir(options, resolvedPaths) + const srcDir = this.resolveAindexPath(options.aindex.rules.src, resolvedPaths.aindexDir) + const distDir = this.resolveAindexPath(options.aindex.rules.dist, resolvedPaths.aindexDir) const reader = createLocalizedPromptReader(fs, path, logger, globalScope) @@ -45,29 +33,35 @@ export class RuleInputPlugin extends AbstractInputPlugin { distDir, { kind: PromptKind.Rule, - localeExtensions: {zh: '.cn.mdx', en: '.mdx'}, + localeExtensions: SourceLocaleExtensions, isDirectoryStructure: false, - createPrompt: async (content, _locale, name) => { + createPrompt: async (content, _locale, name, _metadata) => { const distFilePath = path.join(distDir, `${name}.mdx`) let globs: readonly string[] = [] let scope: RuleScope = 'project' let seriName: string | undefined, - yamlFrontMatter: Record | undefined, - rawFrontMatter: string | undefined + yamlFrontMatter: Record | undefined try { const rawContent = fs.readFileSync(distFilePath, 'utf8') - const {yamlFrontMatter: yfm, rawFrontMatter: rfm} = parseMarkdown(rawContent) - if (yfm) { - yamlFrontMatter = yfm - rawFrontMatter = rfm - globs = (yfm['globs'] as string[]) ?? 
[] - scope = (yfm['scope'] as RuleScope) ?? 'project' - seriName = yfm['seriName'] as string | undefined + const {metadata} = await mdxToMd(rawContent, { // Use mdxToMd to extract metadata from export default syntax + globalScope, + extractMetadata: true, + basePath: distDir + }) + if (metadata?.fields != null) { + yamlFrontMatter = metadata.fields + globs = (metadata.fields['globs'] as string[]) ?? [] + scope = (metadata.fields['scope'] as RuleScope) ?? 'project' + seriName = metadata.fields['seriName'] as string | undefined } } catch { /* Ignore errors */ } + const normalizedName = name.replaceAll('\\', '/') // Normalize path separator for cross-platform compatibility + const prefix = normalizedName.includes('/') ? normalizedName.split('/')[0] ?? '' : '' + const ruleName = normalizedName.split('/').pop() ?? normalizedName + const rulePrompt = { type: PromptKind.Rule, content, @@ -77,18 +71,17 @@ export class RuleInputPlugin extends AbstractInputPlugin { pathKind: FilePathKind.Relative, path: `${name}.mdx`, basePath: distDir, - getDirectoryName: () => name.split('/').pop() ?? name, + getDirectoryName: () => ruleName, getAbsolutePath: () => path.join(distDir, `${name}.mdx`) }, - series: name.includes('/') ? name.split('/')[0] ?? '' : '', - ruleName: name.split('/').pop() ?? 
name, + prefix, + ruleName, globs, scope, markdownContents: [] } as RulePrompt if (yamlFrontMatter != null) Object.assign(rulePrompt, {yamlFrontMatter}) - if (rawFrontMatter != null) Object.assign(rulePrompt, {rawFrontMatter}) if (seriName != null) Object.assign(rulePrompt, {seriName}) return rulePrompt @@ -96,133 +89,10 @@ export class RuleInputPlugin extends AbstractInputPlugin { } ) - const legacyRules: RulePrompt[] = [] - const localizedRules: LocalizedRulePrompt[] = [...localizedRulesFromSrc] - - if (fs.existsSync(distDir)) { - try { - const entries = fs.readdirSync(distDir, {withFileTypes: true}) - - for (const entry of entries) { - if (!entry.isDirectory()) continue - - const seriesName = entry.name - const seriesDir = path.join(distDir, seriesName) - - const alreadyProcessed = localizedRulesFromSrc.some(r => r.name.startsWith(`${seriesName}/`)) - if (alreadyProcessed) continue - - try { - const files = fs.readdirSync(seriesDir, {withFileTypes: true}) - - for (const file of files) { - if (!file.isFile() || !file.name.endsWith('.mdx')) continue - - const baseName = file.name.slice(0, -'.mdx'.length) - const name = `${seriesName}/${baseName}` - const distFilePath = path.join(seriesDir, file.name) - - if (localizedRulesFromSrc.some(r => r.name === name)) continue - - try { - const rawContent = fs.readFileSync(distFilePath, 'utf8') - const parsed = parseMarkdown(rawContent) - - const content = globalScope != null ? await mdxToMd(rawContent, {globalScope, basePath: seriesDir}) : parsed.contentWithoutFrontMatter ?? rawContent - - const {yamlFrontMatter} = parsed - const globs = (yamlFrontMatter?.['globs'] as string[]) ?? [] - const scope = (yamlFrontMatter?.['scope'] as RuleScope) ?? 
'project' - const seriName = yamlFrontMatter?.['seriName'] as string | undefined - - const rulePrompt = { - type: PromptKind.Rule, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: `${name}.mdx`, - basePath: distDir, - getDirectoryName: () => baseName, - getAbsolutePath: () => distFilePath - }, - series: seriesName, - ruleName: baseName, - globs, - scope, - markdownContents: [] - } as RulePrompt - - if (yamlFrontMatter != null) Object.assign(rulePrompt, {yamlFrontMatter}) - if (parsed.rawFrontMatter != null) Object.assign(rulePrompt, {rawFrontMatter: parsed.rawFrontMatter}) - if (seriName != null) Object.assign(rulePrompt, {seriName}) - - legacyRules.push(rulePrompt) - - const localizedPrompt: LocalizedRulePrompt = { - name, - type: PromptKind.Rule, - src: { - zh: { - content, - lastModified: fs.statSync(distFilePath).mtime, - prompt: rulePrompt, - filePath: distFilePath - }, - default: { - content, - lastModified: fs.statSync(distFilePath).mtime, - prompt: rulePrompt, - filePath: distFilePath - }, - defaultLocale: 'zh' - }, - dist: { - content, - lastModified: fs.statSync(distFilePath).mtime, - prompt: rulePrompt, - filePath: distFilePath - }, - metadata: { - hasDist: true, - hasMultipleLocales: false, - isDirectoryStructure: true - }, - paths: { - dist: distFilePath - } - } - - localizedRules.push(localizedPrompt) - } catch (error) { - logger.warn('Failed to process rule from dist', {path: distFilePath, error}) - } - } - } catch (error) { - logger.warn('Failed to scan series directory', {path: seriesDir, error}) - } - } - } catch (error) { - logger.warn('Failed to scan dist directory', {path: distDir, error}) - } - } - for (const error of errors) logger.warn('Failed to read rule from src', {path: error.path, phase: error.phase, error: error.error}) - const promptIndex = new Map() - for (const rule of localizedRules) promptIndex.set(rule.name, rule) - return { - prompts: { - skills: [], 
- commands: [], - subAgents: [], - rules: localizedRules, - readme: [] - }, - promptIndex, - rules: [...localizedRulesFromSrc.map(r => r.src.default.prompt!).filter(Boolean), ...legacyRules] + rules: localizedRulesFromSrc.map(r => r.src.default.prompt!).filter(Boolean) } } } diff --git a/cli/src/inputs/input-shared-ignore.ts b/cli/src/inputs/input-shared-ignore.ts index 62764844..238c8be1 100644 --- a/cli/src/inputs/input-shared-ignore.ts +++ b/cli/src/inputs/input-shared-ignore.ts @@ -1,28 +1,21 @@ -import type {AIAgentIgnoreConfigFile, CollectedInputContext, InputPluginContext} from '../plugins/plugin-shared' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' -import {AINDEX_FILE_NAMES} from '../plugins/plugin-shared' - -const IGNORE_FILE_NAMES: readonly string[] = [ - AINDEX_FILE_NAMES.QODER_IGNORE, - AINDEX_FILE_NAMES.CURSOR_IGNORE, - AINDEX_FILE_NAMES.WARP_INDEX_IGNORE, - AINDEX_FILE_NAMES.AI_IGNORE, - AINDEX_FILE_NAMES.CODEIUM_IGNORE, - '.kiroignore', - '.traeignore' -] as const +import type {AIAgentIgnoreConfigFile, InputCollectedContext, InputPluginContext} from '../plugins/plugin-core' +import {AbstractInputPlugin} from '../plugins/plugin-core' +import {AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS, resolvePublicDefinitionPath} from '../public-config-paths' export class AIAgentIgnoreInputPlugin extends AbstractInputPlugin { constructor() { super('AIAgentIgnoreInputPlugin') } - collect(ctx: InputPluginContext): Partial { - const {aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) + collect(ctx: InputPluginContext): Partial { + const {workspaceDir, aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) const results: AIAgentIgnoreConfigFile[] = [] - for (const fileName of IGNORE_FILE_NAMES) { - const filePath = ctx.path.join(aindexDir, fileName) + for (const fileName of AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS) { + const filePath = resolvePublicDefinitionPath(aindexDir, fileName, { + command: ctx.runtimeCommand, + workspaceDir + }) if 
(!ctx.fs.existsSync(filePath)) { this.log.debug({action: 'collect', message: 'Ignore file not found', path: filePath}) continue @@ -32,7 +25,7 @@ export class AIAgentIgnoreInputPlugin extends AbstractInputPlugin { this.log.debug({action: 'collect', message: 'Ignore file is empty', path: filePath}) continue } - results.push({fileName, content}) + results.push({fileName, content, sourcePath: filePath}) this.log.debug({action: 'collect', message: 'Loaded ignore file', path: filePath, fileName}) } diff --git a/cli/src/inputs/input-subagent.test.ts b/cli/src/inputs/input-subagent.test.ts new file mode 100644 index 00000000..10de1f75 --- /dev/null +++ b/cli/src/inputs/input-subagent.test.ts @@ -0,0 +1,137 @@ +import type {InputPluginContext} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {mergeConfig} from '../config' +import {createLogger} from '../plugins/plugin-core' +import {SubAgentInputPlugin} from './input-subagent' + +function createContext(tempWorkspace: string): InputPluginContext { + const options = mergeConfig({workspaceDir: tempWorkspace}) + + return { + logger: createLogger('SubAgentInputPluginTest', 'error'), + fs, + path, + glob, + userConfigOptions: options, + dependencyContext: {} + } as InputPluginContext +} + +describe('subagent input plugin', () => { + it('prefers dist content, and dist also runs mdx-to-md compilation', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'subagents') + const distDir = path.join(aindexDir, 'dist', 'subagents') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + + const srcFile = path.join(srcDir, 'demo.src.mdx') + const distFile = path.join(distDir, 'demo.mdx') + 
fs.writeFileSync(srcFile, '---\ndescription: src\n---\nSubAgent source', 'utf8') + fs.writeFileSync(distFile, '---\ndescription: dist\n---\nexport const x = 1\n\nSubAgent dist', 'utf8') + + const plugin = new SubAgentInputPlugin() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.subAgents?.length ?? 0).toBe(1) + expect(result.subAgents?.[0]?.agentName).toBe('demo') + expect(result.subAgents?.[0]?.content).toContain('SubAgent dist') + expect(result.subAgents?.[0]?.content).not.toContain('SubAgent source') + expect(result.subAgents?.[0]?.content).not.toContain('export const x = 1') + expect(result.subAgents?.[0]?.yamlFrontMatter?.description).toBe('dist') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('extracts directory name as subagent prefix', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-prefix-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'subagents', 'qa') + const distDir = path.join(aindexDir, 'dist', 'subagents', 'qa') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + + const srcFile = path.join(srcDir, 'boot.src.mdx') + const distFile = path.join(distDir, 'boot.mdx') + fs.writeFileSync(srcFile, '---\ndescription: qa boot src\n---\nSubAgent source', 'utf8') + fs.writeFileSync(distFile, 'SubAgent dist', 'utf8') + + const plugin = new SubAgentInputPlugin() + const result = await plugin.collect(createContext(tempWorkspace)) + const [subAgent] = result.subAgents ?? [] + + expect(result.subAgents?.length ?? 
0).toBe(1) + expect(subAgent?.agentPrefix).toBe('qa') + expect(subAgent?.agentName).toBe('boot') + expect(subAgent?.content).toContain('SubAgent dist') + expect(subAgent?.content).not.toContain('SubAgent source') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('keeps rawMdxContent from dist for output-side recompilation', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-rawmdx-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const srcDir = path.join(aindexDir, 'subagents') + const distDir = path.join(aindexDir, 'dist', 'subagents') + + try { + fs.mkdirSync(srcDir, {recursive: true}) + fs.mkdirSync(distDir, {recursive: true}) + + const srcFile = path.join(srcDir, 'demo.src.mdx') + const distFile = path.join(distDir, 'demo.mdx') + fs.writeFileSync(srcFile, '---\ndescription: src\n---\nSubAgent source', 'utf8') + fs.writeFileSync(distFile, '---\ndescription: dist\n---\nexport const x = 1\n\nSubAgent dist', 'utf8') + + const plugin = new SubAgentInputPlugin() + const result = await plugin.collect(createContext(tempWorkspace)) + const [subAgent] = result.subAgents ?? 
[] + + expect(subAgent?.rawMdxContent).toContain('export const x = 1') + expect(subAgent?.content).toContain('SubAgent dist') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + + it('loads subagents from dist when the source tree is missing', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-subagent-dist-only-test-')) + const aindexDir = path.join(tempWorkspace, 'aindex') + const distDir = path.join(aindexDir, 'dist', 'subagents') + + try { + fs.mkdirSync(distDir, {recursive: true}) + fs.writeFileSync( + path.join(distDir, 'demo.mdx'), + '---\ndescription: dist only\n---\nDist only subagent', + 'utf8' + ) + + const plugin = new SubAgentInputPlugin() + const result = await plugin.collect(createContext(tempWorkspace)) + + expect(result.subAgents?.length ?? 0).toBe(1) + expect(result.subAgents?.[0]?.agentName).toBe('demo') + expect(result.subAgents?.[0]?.content).toContain('Dist only subagent') + expect(result.subAgents?.[0]?.yamlFrontMatter?.description).toBe('dist only') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/inputs/input-subagent.ts b/cli/src/inputs/input-subagent.ts index dac72a24..817eab28 100644 --- a/cli/src/inputs/input-subagent.ts +++ b/cli/src/inputs/input-subagent.ts @@ -1,42 +1,31 @@ import type { - CollectedInputContext, + InputCollectedContext, InputPluginContext, Locale, - LocalizedSubAgentPrompt, - PluginOptions, - ResolvedBasePaths, - SubAgentPrompt -} from '../plugins/plugin-shared' + SubAgentPrompt, + SubAgentYAMLFrontMatter +} from '../plugins/plugin-core' import { AbstractInputPlugin, - createLocalizedPromptReader -} from '@truenine/plugin-input-shared' -import { + createLocalizedPromptReader, FilePathKind, - PromptKind -} from '../plugins/plugin-shared' + PromptKind, + SourceLocaleExtensions -export interface AgentPrefixInfo { - readonly agentPrefix?: string - readonly agentName: string -} +} from 
'../plugins/plugin-core' export class SubAgentInputPlugin extends AbstractInputPlugin { constructor() { super('SubAgentInputPlugin') } - private getDistDir(options: Required, resolvedPaths: ResolvedBasePaths): string { - return this.resolveAindexPath(options.aindex.subAgents.dist, resolvedPaths.aindexDir) - } - private createSubAgentPrompt( content: string, _locale: Locale, name: string, - _srcDir: string, distDir: string, - ctx: InputPluginContext + ctx: InputPluginContext, + metadata?: Record ): SubAgentPrompt { const {path} = ctx @@ -45,12 +34,18 @@ export class SubAgentInputPlugin extends AbstractInputPlugin { const parentDirName = slashIndex !== -1 ? normalizedName.slice(0, slashIndex) : void 0 const fileName = slashIndex !== -1 ? normalizedName.slice(slashIndex + 1) : normalizedName - const prefixInfo = this.extractPrefixInfo(fileName, parentDirName) + const baseName = fileName.replace(/\.mdx$/, '') + const underscoreIndex = baseName.indexOf('_') + const agentPrefix = parentDirName ?? (underscoreIndex === -1 ? void 0 : baseName.slice(0, Math.max(0, underscoreIndex))) + const agentName = parentDirName != null || underscoreIndex === -1 + ? 
baseName + : baseName.slice(Math.max(0, underscoreIndex + 1)) const filePath = path.join(distDir, `${name}.mdx`) const entryName = `${name}.mdx` + const yamlFrontMatter = metadata as SubAgentYAMLFrontMatter | undefined - return { + const prompt: SubAgentPrompt = { type: PromptKind.SubAgent, content, length: content.length, @@ -62,37 +57,23 @@ export class SubAgentInputPlugin extends AbstractInputPlugin { getDirectoryName: () => entryName.replace(/\.mdx$/, ''), getAbsolutePath: () => filePath }, - ...prefixInfo.agentPrefix != null && {agentPrefix: prefixInfo.agentPrefix}, - agentName: prefixInfo.agentName + ...agentPrefix != null && {agentPrefix}, + agentName } as SubAgentPrompt - } - - extractPrefixInfo(fileName: string, parentDirName?: string): AgentPrefixInfo { - const baseName = fileName.replace(/\.mdx$/, '') - - if (parentDirName != null) { - return { - agentPrefix: parentDirName, - agentName: baseName - } - } - - const underscoreIndex = baseName.indexOf('_') - if (underscoreIndex === -1) return {agentName: baseName} + if (yamlFrontMatter == null) return prompt - return { - agentPrefix: baseName.slice(0, Math.max(0, underscoreIndex)), - agentName: baseName.slice(Math.max(0, underscoreIndex + 1)) - } + Object.assign(prompt, {yamlFrontMatter}) + if (yamlFrontMatter.seriName != null) Object.assign(prompt, {seriName: yamlFrontMatter.seriName}) + return prompt } - override async collect(ctx: InputPluginContext): Promise> { + override async collect(ctx: InputPluginContext): Promise> { const {userConfigOptions: options, logger, path, fs, globalScope} = ctx const resolvedPaths = this.resolveBasePaths(options) const srcDir = this.resolveAindexPath(options.aindex.subAgents.src, resolvedPaths.aindexDir) - const distDir = this.getDistDir(options, resolvedPaths) + const distDir = this.resolveAindexPath(options.aindex.subAgents.dist, resolvedPaths.aindexDir) logger.debug('SubAgentInputPlugin collecting', { srcDir, @@ -107,15 +88,15 @@ export class SubAgentInputPlugin extends 
AbstractInputPlugin { distDir, { kind: PromptKind.SubAgent, - localeExtensions: {zh: '.md', en: '.mdx'}, + localeExtensions: SourceLocaleExtensions, isDirectoryStructure: false, - createPrompt: async (content, locale, name) => this.createSubAgentPrompt( + createPrompt: (content, locale, name, metadata) => this.createSubAgentPrompt( content, locale, name, - srcDir, distDir, - ctx + ctx, + metadata ) } ) @@ -127,30 +108,33 @@ export class SubAgentInputPlugin extends AbstractInputPlugin { for (const error of errors) logger.warn('Failed to read subAgent', {path: error.path, phase: error.phase, error: error.error}) - const legacySubAgents: SubAgentPrompt[] = [] + const flatSubAgents: SubAgentPrompt[] = [] for (const localized of localizedSubAgents) { - const prompt = localized.dist?.prompt ?? localized.src.default.prompt - if (prompt) legacySubAgents.push(prompt) + const distContent = localized.dist + if (distContent?.prompt != null) { + const {prompt: distPrompt, rawMdx} = distContent + flatSubAgents.push(rawMdx == null + ? distPrompt + : {...distPrompt, rawMdxContent: rawMdx}) + continue + } + + const srcPrompt = localized.src.default.prompt + if (srcPrompt != null) { + const {rawMdx} = localized.src.default + flatSubAgents.push(rawMdx == null + ? 
srcPrompt + : {...srcPrompt, rawMdxContent: rawMdx}) + } } - logger.debug('SubAgentInputPlugin legacy subAgents', { - count: legacySubAgents.length, - agents: legacySubAgents.map(a => a.agentName) + logger.debug('SubAgentInputPlugin flattened subAgents', { + count: flatSubAgents.length, + agents: flatSubAgents.map(a => a.agentName) }) - const promptIndex = new Map() - for (const sub of localizedSubAgents) promptIndex.set(sub.name, sub) - return { - prompts: { - skills: [], - commands: [], - subAgents: localizedSubAgents, - rules: [], - readme: [] - }, - promptIndex, - subAgents: legacySubAgents + subAgents: flatSubAgents } } } diff --git a/cli/src/inputs/input-vscode-config.ts b/cli/src/inputs/input-vscode-config.ts index 86a4282c..bda10641 100644 --- a/cli/src/inputs/input-vscode-config.ts +++ b/cli/src/inputs/input-vscode-config.ts @@ -1,45 +1,24 @@ -import type {CollectedInputContext, InputPluginContext, ProjectIDEConfigFile} from '../plugins/plugin-shared' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' -import {FilePathKind, IDEKind} from '../plugins/plugin-shared' - -function readIdeConfigFile( - type: T, - relativePath: string, - aindexDir: string, - fs: typeof import('node:fs'), - path: typeof import('node:path') -): ProjectIDEConfigFile | undefined { - const absPath = path.join(aindexDir, relativePath) - if (!(fs.existsSync(absPath) && fs.statSync(absPath).isFile())) return void 0 - - const content = fs.readFileSync(absPath, 'utf8') - return { - type, - content, - length: content.length, - filePathKind: FilePathKind.Absolute, - dir: { - pathKind: FilePathKind.Absolute, - path: absPath, - getDirectoryName: () => path.basename(absPath) - } - } -} +import type {InputCollectedContext, InputPluginContext, ProjectIDEConfigFile} from '../plugins/plugin-core' +import {AbstractInputPlugin, IDEKind} from '../plugins/plugin-core' +import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' export class VSCodeConfigInputPlugin extends 
AbstractInputPlugin { constructor() { super('VSCodeConfigInputPlugin') } - collect(ctx: InputPluginContext): Partial { - const {userConfigOptions, fs, path} = ctx - const {aindexDir} = this.resolveBasePaths(userConfigOptions) + collect(ctx: InputPluginContext): Partial { + const {userConfigOptions, fs} = ctx + const {workspaceDir, aindexDir} = this.resolveBasePaths(userConfigOptions) const files = ['.vscode/settings.json', '.vscode/extensions.json'] const vscodeConfigFiles: ProjectIDEConfigFile[] = [] for (const relativePath of files) { - const file = readIdeConfigFile(IDEKind.VSCode, relativePath, aindexDir, fs, path) + const file = readPublicIdeConfigDefinitionFile(IDEKind.VSCode, relativePath, aindexDir, fs, { + command: ctx.runtimeCommand, + workspaceDir + }) if (file != null) vscodeConfigFiles.push(file) } diff --git a/cli/src/inputs/input-workspace.ts b/cli/src/inputs/input-workspace.ts index 32a0f5a2..1e7fec3e 100644 --- a/cli/src/inputs/input-workspace.ts +++ b/cli/src/inputs/input-workspace.ts @@ -1,14 +1,13 @@ -import type {CollectedInputContext, InputPluginContext, Workspace} from '../plugins/plugin-shared' +import type {InputCollectedContext, InputPluginContext, Workspace} from '../plugins/plugin-core' import * as path from 'node:path' -import {AbstractInputPlugin} from '@truenine/plugin-input-shared' -import {FilePathKind} from '../plugins/plugin-shared' +import {AbstractInputPlugin, FilePathKind} from '../plugins/plugin-core' export class WorkspaceInputPlugin extends AbstractInputPlugin { constructor() { super('WorkspaceInputPlugin') } - collect(ctx: InputPluginContext): Partial { + collect(ctx: InputPluginContext): Partial { const {userConfigOptions: options} = ctx const {workspaceDir, aindexDir} = this.resolveBasePaths(options) diff --git a/cli/src/lib.rs b/cli/src/lib.rs index 7b2495e7..28508a35 100644 --- a/cli/src/lib.rs +++ b/cli/src/lib.rs @@ -1,6 +1,6 @@ //! tnmsc library — exposes core functionality for GUI backend direct invocation. //! -//! 
Pure Rust commands: version, load_config, config_show, outdated +//! Pure Rust commands: version, load_config, config_show //! Bridge commands (Node.js): run_bridge_command pub mod bridge; @@ -42,15 +42,6 @@ pub struct BridgeCommandResult { pub exit_code: i32, } -/// Result of the `outdated` check. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct OutdatedResult { - pub current_version: String, - pub latest_version: Option, - pub is_outdated: bool, -} - // --------------------------------------------------------------------------- // Public API functions // --------------------------------------------------------------------------- @@ -71,33 +62,6 @@ pub fn config_show(cwd: &Path) -> Result { serde_json::to_string_pretty(&result.config).map_err(CliError::from) } -/// Check whether the current CLI version is outdated against the npm registry. -pub fn outdated() -> Result { - let current = env!("CARGO_PKG_VERSION").to_string(); - - let output = std::process::Command::new("npm") - .args(["view", "@truenine/memory-sync-cli", "version", "--json"]) - .output(); - - match output { - Ok(out) if out.status.success() => { - let raw = String::from_utf8_lossy(&out.stdout); - let latest = raw.trim().trim_matches('"').to_string(); - let is_outdated = latest != current; - Ok(OutdatedResult { - current_version: current, - latest_version: Some(latest), - is_outdated, - }) - } - _ => Ok(OutdatedResult { - current_version: current, - latest_version: None, - is_outdated: false, - }), - } -} - /// Execute a bridge command (execute, dry-run, clean, plugins) via Node.js subprocess. /// /// The subprocess output is captured (piped) and returned as a [`BridgeCommandResult`]. @@ -168,18 +132,6 @@ mod property_tests { prop_assert!(parsed.is_ok(), "config_show output should be valid JSON, got: {}", json_str); } - // ---- outdated() ---- - - /// outdated() always returns Ok(OutdatedResult) with current_version matching CARGO_PKG_VERSION. 
- #[test] - fn prop_outdated_current_version_matches(_seed in 0u64..20) { - let result = outdated(); - prop_assert!(result.is_ok(), "outdated should return Ok, got: {:?}", result.err()); - let out = result.unwrap(); - prop_assert_eq!(out.current_version.as_str(), env!("CARGO_PKG_VERSION"), - "current_version should match CARGO_PKG_VERSION"); - } - // ---- BridgeCommandResult structural property ---- /// BridgeCommandResult fields are typed and accessible for any combination of diff --git a/cli/src/log.ts b/cli/src/log.ts deleted file mode 100644 index 39aa1709..00000000 --- a/cli/src/log.ts +++ /dev/null @@ -1,9 +0,0 @@ -export { - createLogger, - getGlobalLogLevel, - setGlobalLogLevel -} from '@truenine/logger' -export type { - ILogger, - LogLevel -} from '@truenine/logger' diff --git a/cli/src/main.rs b/cli/src/main.rs index 9ec491c2..dc5cbf5b 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,6 +1,6 @@ //! tnmsc — Rust CLI entry point. //! -//! Pure Rust commands: help, version, outdated, config, config-show +//! Pure Rust commands: help, version, config, config-show //! 
Bridge commands (Node.js): execute, dry-run, clean, plugins mod cli; @@ -32,7 +32,6 @@ fn main() -> ExitCode { // Pure Rust commands ResolvedCommand::Help => tnmsc::commands::help::execute(), ResolvedCommand::Version => tnmsc::commands::version::execute(), - ResolvedCommand::Outdated => tnmsc::commands::outdated::execute(), ResolvedCommand::Config(pairs) => tnmsc::commands::config_cmd::execute(&pairs), ResolvedCommand::ConfigShow => tnmsc::commands::config_show::execute(), diff --git a/cli/src/pipeline/CliArgumentParser.test.ts b/cli/src/pipeline/CliArgumentParser.test.ts new file mode 100644 index 00000000..ad49ff88 --- /dev/null +++ b/cli/src/pipeline/CliArgumentParser.test.ts @@ -0,0 +1,9 @@ +import {describe, expect, it} from 'vitest' +import {parseArgs, resolveCommand} from './CliArgumentParser' + +describe('cli argument parser', () => { + it('resolves the init subcommand to InitCommand', () => { + const command = resolveCommand(parseArgs(['init'])) + expect(command.name).toBe('init') + }) +}) diff --git a/cli/src/pipeline/CliArgumentParser.ts b/cli/src/pipeline/CliArgumentParser.ts index 7476c347..ac5c1b60 100644 --- a/cli/src/pipeline/CliArgumentParser.ts +++ b/cli/src/pipeline/CliArgumentParser.ts @@ -5,13 +5,23 @@ * Refactored to use Command Factory pattern for command creation */ -import type {Command} from '@/commands' -import {createDefaultCommandRegistry} from '@/commands/CommandRegistryFactory' +import type {Command} from '@/commands/Command' +import {FactoryPriority} from '@/commands/CommandFactory' +import {CommandRegistry} from '@/commands/CommandRegistry' +import {CleanCommandFactory} from '@/commands/factories/CleanCommandFactory' +import {ConfigCommandFactory} from '@/commands/factories/ConfigCommandFactory' +import {DryRunCommandFactory} from '@/commands/factories/DryRunCommandFactory' +import {ExecuteCommandFactory} from '@/commands/factories/ExecuteCommandFactory' +import {HelpCommandFactory} from '@/commands/factories/HelpCommandFactory' 
+import {InitCommandFactory} from '@/commands/factories/InitCommandFactory' +import {PluginsCommandFactory} from '@/commands/factories/PluginsCommandFactory' +import {UnknownCommandFactory} from '@/commands/factories/UnknownCommandFactory' +import {VersionCommandFactory} from '@/commands/factories/VersionCommandFactory' /** * Valid subcommands for the CLI */ -export type Subcommand = 'help' | 'version' | 'outdated' | 'init' | 'dry-run' | 'clean' | 'config' | 'plugins' +export type Subcommand = 'help' | 'version' | 'init' | 'dry-run' | 'clean' | 'config' | 'plugins' /** * Valid log levels for the CLI @@ -29,7 +39,6 @@ export interface ParsedCliArgs { readonly jsonFlag: boolean readonly showFlag: boolean readonly logLevel: LogLevel | undefined - readonly logLevelFlags: readonly LogLevel[] readonly setOption: readonly [key: string, value: string][] readonly unknownCommand: string | undefined readonly positional: readonly string[] @@ -39,7 +48,7 @@ export interface ParsedCliArgs { /** * Valid subcommands set for quick lookup */ -const VALID_SUBCOMMANDS: ReadonlySet = new Set(['help', 'version', 'outdated', 'init', 'dry-run', 'clean', 'config', 'plugins']) +const VALID_SUBCOMMANDS: ReadonlySet = new Set(['help', 'version', 'init', 'dry-run', 'clean', 'config', 'plugins']) /** * Log level flags mapping @@ -100,28 +109,11 @@ function isScriptOrPackage(arg: string): boolean { return /^(?:@[\w-]+\/)?[\w-]+$/.test(arg) && !arg.startsWith('-') // npx executed package name } -/** - * Resolve log level from parsed arguments. - * When multiple log level flags are provided, returns the most verbose level. - * Priority: trace > debug > info > warn > error - */ -export function resolveLogLevel(args: ParsedCliArgs): LogLevel | undefined { - const {logLevelFlags} = args - - if (logLevelFlags.length === 0) return void 0 - - let mostVerbose: LogLevel = logLevelFlags[0]! // Find the most verbose level (lowest priority number) - let lowestPriority = LOG_LEVEL_PRIORITY.get(mostVerbose) ?? 
4 - - for (const level of logLevelFlags) { - const priority = LOG_LEVEL_PRIORITY.get(level) ?? 4 - if (priority < lowestPriority) { - lowestPriority = priority - mostVerbose = level - } - } - - return mostVerbose +function pickMoreVerbose(current: LogLevel | undefined, candidate: LogLevel): LogLevel { + if (current == null) return candidate + const currentPriority = LOG_LEVEL_PRIORITY.get(current) ?? 4 + const candidatePriority = LOG_LEVEL_PRIORITY.get(candidate) ?? 4 + return candidatePriority < currentPriority ? candidate : current } /** @@ -136,7 +128,6 @@ export function parseArgs(args: readonly string[]): ParsedCliArgs { jsonFlag: boolean showFlag: boolean logLevel: LogLevel | undefined - logLevelFlags: LogLevel[] setOption: [key: string, value: string][] unknownCommand: string | undefined positional: string[] @@ -149,7 +140,6 @@ export function parseArgs(args: readonly string[]): ParsedCliArgs { jsonFlag: false, showFlag: false, logLevel: void 0, - logLevelFlags: [], setOption: [], unknownCommand: void 0, positional: [], @@ -173,8 +163,7 @@ export function parseArgs(args: readonly string[]): ParsedCliArgs { const logLevel = LOG_LEVEL_FLAGS.get(key) // Check log level flags if (logLevel != null) { - result.logLevelFlags.push(logLevel) - result.logLevel = logLevel + result.logLevel = pickMoreVerbose(result.logLevel, logLevel) continue } @@ -240,6 +229,24 @@ export function parseArgs(args: readonly string[]): ParsedCliArgs { */ let commandRegistry: ReturnType | undefined +function createDefaultCommandRegistry(): CommandRegistry { + const registry = new CommandRegistry() + + registry.register(new VersionCommandFactory()) // High priority: flag-based commands + registry.register(new HelpCommandFactory()) + registry.register(new UnknownCommandFactory()) + + registry.registerWithPriority(new InitCommandFactory(), FactoryPriority.Subcommand) + registry.registerWithPriority(new DryRunCommandFactory(), FactoryPriority.Subcommand) + registry.registerWithPriority(new 
CleanCommandFactory(), FactoryPriority.Subcommand) + registry.registerWithPriority(new PluginsCommandFactory(), FactoryPriority.Subcommand) + registry.registerWithPriority(new ConfigCommandFactory(), FactoryPriority.Subcommand) + + registry.registerWithPriority(new ExecuteCommandFactory(), FactoryPriority.Subcommand) // Lowest priority: default/catch-all command + + return registry +} + /** * Get or create the command registry singleton */ @@ -248,13 +255,6 @@ function getCommandRegistry(): ReturnType { return commandRegistry } -/** - * Reset the command registry singleton (useful for testing) - */ -export function resetCommandRegistry(): void { - commandRegistry = void 0 -} - /** * Resolve command from parsed CLI arguments using factory pattern * Delegates command creation to registered factories based on priority diff --git a/cli/src/pipeline/ContextMerger.ts b/cli/src/pipeline/ContextMerger.ts index cc053aed..ecc485f5 100644 --- a/cli/src/pipeline/ContextMerger.ts +++ b/cli/src/pipeline/ContextMerger.ts @@ -1,9 +1,9 @@ /** * Context Merger Module - * Handles merging of partial CollectedInputContext objects + * Handles merging of partial InputCollectedContext objects */ -import type {CollectedInputContext, Workspace} from '../plugins/plugin-shared' +import type {InputCollectedContext, Workspace} from '../plugins/plugin-core' /** * Merge strategy types for context fields @@ -15,11 +15,11 @@ type MergeStrategy = 'concat' | 'override' | 'mergeProjects' */ interface FieldConfig { readonly strategy: MergeStrategy - readonly getter: (ctx: Partial) => T | undefined + readonly getter: (ctx: Partial) => T | undefined } /** - * Merge configuration for all CollectedInputContext fields + * Merge configuration for all InputCollectedContext fields */ const FIELD_CONFIGS: Record> = { workspace: { @@ -128,49 +128,23 @@ function mergeField( } /** - * Build merge result object from merged fields - */ -function buildMergeResult( - mergedFields: Map -): Partial { - const result: 
Record = {} - - for (const [key, value] of mergedFields) { - if (value != null) result[key] = value - } - - return result as Partial -} - -/** - * Merge two partial CollectedInputContext objects + * Merge two partial InputCollectedContext objects * Uses configuration-driven approach to reduce code duplication */ export function mergeContexts( - base: Partial, - addition: Partial -): Partial { - const mergedFields = new Map() + base: Partial, + addition: Partial +): Partial { + const result: Record = {} for (const [fieldName, config] of Object.entries(FIELD_CONFIGS)) { // Process each configured field const baseValue = config.getter(base) const additionValue = config.getter(addition) const mergedValue = mergeField(baseValue, additionValue, config.strategy) - mergedFields.set(fieldName, mergedValue) + if (mergedValue != null) result[fieldName] = mergedValue } - return buildMergeResult(mergedFields) -} - -/** - * Legacy merge function for backwards compatibility - * Uses the optimized configuration-driven approach - */ -export function mergeContextsLegacy( - base: Partial, - addition: Partial -): Partial { - return mergeContexts(base, addition) + return result as Partial } /** @@ -178,90 +152,17 @@ export function mergeContextsLegacy( */ export function buildDependencyContext( plugin: {dependsOn?: readonly string[]}, - outputsByPlugin: Map>, - mergeFn: (base: Partial, addition: Partial) => Partial -): Partial { + outputsByPlugin: Map>, + mergeFn: (base: Partial, addition: Partial) => Partial +): Partial { const deps = plugin.dependsOn ?? 
[] if (deps.length === 0) return {} - const allDeps = collectTransitiveDependencies(plugin, outputsByPlugin) - - let merged: Partial = {} - for (const depName of allDeps) { - const depOutput = outputsByPlugin.get(depName) - if (depOutput != null) merged = mergeFn(merged, depOutput) - } - - return merged -} - -/** - * Collect transitive dependencies for a plugin - */ -function collectTransitiveDependencies( - plugin: {dependsOn?: readonly string[]}, - outputsByPlugin: Map> -): string[] { const visited = new Set() - const result: string[] = [] - - const visit = (deps: readonly string[]): void => { - for (const dep of deps) { - if (visited.has(dep)) continue - visited.add(dep) - - const depOutput = outputsByPlugin.get(dep) - if (depOutput != null) result.push(dep) - } - } - - visit(plugin.dependsOn ?? []) - return result -} - -/** - * Collect transitive dependencies for a plugin with full dependency resolution - */ -export function collectTransitiveDependenciesFull( - plugin: {dependsOn?: readonly string[]}, - _outputsByPlugin: Map>, - pluginRegistry: Map -): string[] { - const visited = new Set() - const result: string[] = [] - - const visit = (deps: readonly string[]): void => { - for (const dep of deps) { - if (visited.has(dep)) continue - visited.add(dep) - - result.push(dep) - - const depPlugin = pluginRegistry.get(dep) // Recursively visit dependencies of this dependency - if (depPlugin != null) visit(depPlugin.dependsOn ?? []) - } - } - - visit(plugin.dependsOn ?? []) - return result -} - -/** - * Build dependency context with full transitive dependency resolution - */ -export function buildDependencyContextFull( - plugin: {name: string, dependsOn?: readonly string[]}, - outputsByPlugin: Map>, - pluginRegistry: Map, - mergeFn: (base: Partial, addition: Partial) => Partial -): Partial { - const deps = plugin.dependsOn ?? 
[] - if (deps.length === 0) return {} - - const allDeps = collectTransitiveDependenciesFull(plugin, outputsByPlugin, pluginRegistry) - - let merged: Partial = {} - for (const depName of allDeps) { + let merged: Partial = {} + for (const depName of deps) { + if (visited.has(depName)) continue + visited.add(depName) const depOutput = outputsByPlugin.get(depName) if (depOutput != null) merged = mergeFn(merged, depOutput) } diff --git a/cli/src/pipeline/PluginDependencyResolver.ts b/cli/src/pipeline/PluginDependencyResolver.ts index 60fa25ce..44c6c185 100644 --- a/cli/src/pipeline/PluginDependencyResolver.ts +++ b/cli/src/pipeline/PluginDependencyResolver.ts @@ -3,37 +3,8 @@ * Handles dependency graph building, validation, and topological sorting */ -import type {Plugin, PluginKind} from '../plugins/plugin-shared' -import {CircularDependencyError, MissingDependencyError} from '../plugins/plugin-shared' - -/** - * Build dependency graph from plugins - */ -export function buildDependencyGraph( - plugins: readonly Plugin[] -): Map { - const graph = new Map() - for (const plugin of plugins) { - const deps = plugin.dependsOn ?? [] - graph.set(plugin.name, [...deps]) - } - return graph -} - -/** - * Validate that all plugin dependencies exist - */ -export function validateDependencies( - plugins: readonly Plugin[] -): void { - const pluginNames = new Set(plugins.map(p => p.name)) - for (const plugin of plugins) { - const deps = plugin.dependsOn ?? 
[] - for (const dep of deps) { - if (!pluginNames.has(dep)) throw new MissingDependencyError(plugin.name, dep) - } - } -} +import type {Plugin, PluginKind} from '../plugins/plugin-core' +import {CircularDependencyError, MissingDependencyError} from '../plugins/plugin-core' /** * Find cycle path in dependency graph for error reporting @@ -95,7 +66,13 @@ function findCyclePath( export function topologicalSort( plugins: readonly Plugin[] ): Plugin[] { - validateDependencies(plugins) // Validate dependencies first + const pluginNames = new Set(plugins.map(p => p.name)) // Validate dependencies first + for (const plugin of plugins) { + const deps = plugin.dependsOn ?? [] + for (const dep of deps) { + if (!pluginNames.has(dep)) throw new MissingDependencyError(plugin.name, dep) + } + } const pluginMap = new Map>() // Build plugin map for quick lookup for (const plugin of plugins) pluginMap.set(plugin.name, plugin) diff --git a/cli/src/pipeline/index.ts b/cli/src/pipeline/index.ts deleted file mode 100644 index 684c7292..00000000 --- a/cli/src/pipeline/index.ts +++ /dev/null @@ -1,23 +0,0 @@ -export { // Export argument parsing - extractUserArgs, - type LogLevel, - parseArgs, - type ParsedCliArgs, - resolveCommand, - resolveLogLevel, - type Subcommand -} from './CliArgumentParser' - -export { // Export context merging - buildDependencyContext, - buildDependencyContextFull, - collectTransitiveDependenciesFull, - mergeContexts, - mergeContextsLegacy -} from './ContextMerger' - -export { // Export dependency resolution - buildDependencyGraph, - topologicalSort, - validateDependencies -} from './PluginDependencyResolver' diff --git a/cli/src/plugin-runtime.ts b/cli/src/plugin-runtime.ts index 6049032f..40b54cc6 100644 --- a/cli/src/plugin-runtime.ts +++ b/cli/src/plugin-runtime.ts @@ -1,4 +1,4 @@ -import type {OutputCleanContext, OutputWriteContext} from './plugins/plugin-shared' +import type {OutputCleanContext, OutputWriteContext} from './plugins/plugin-core' /** * Plugin 
Runtime Entry Point * @@ -10,22 +10,19 @@ import type {OutputCleanContext, OutputWriteContext} from './plugins/plugin-shar * * Subcommands: execute, dry-run, clean, plugins */ -import type {Command, CommandContext} from '@/commands' +import type {Command, CommandContext} from '@/commands/Command' import type {PipelineConfig} from '@/config' import * as fs from 'node:fs' import * as path from 'node:path' import process from 'node:process' import glob from 'fast-glob' -import { - CleanCommand, - DryRunCleanCommand, - DryRunOutputCommand, - ExecuteCommand, - JsonOutputCommand, - PluginsCommand -} from '@/commands' -import userPluginConfigPromise from './plugin.config' -import {createLogger, setGlobalLogLevel} from './plugins/plugin-shared' +import {CleanCommand} from '@/commands/CleanCommand' +import {DryRunCleanCommand} from '@/commands/DryRunCleanCommand' +import {DryRunOutputCommand} from '@/commands/DryRunOutputCommand' +import {ExecuteCommand} from '@/commands/ExecuteCommand' +import {JsonOutputCommand} from '@/commands/JsonOutputCommand' +import {PluginsCommand} from '@/commands/PluginsCommand' +import {createLogger, setGlobalLogLevel} from './plugins/plugin-core' /** * Parse runtime arguments. 
@@ -64,6 +61,7 @@ async function main(): Promise { if (json) setGlobalLogLevel('silent') + const {default: userPluginConfigPromise} = await import('./plugin.config') const userPluginConfig: PipelineConfig = await userPluginConfigPromise let command = resolveRuntimeCommand(subcommand, dryRun) @@ -81,7 +79,8 @@ async function main(): Promise { fs, path, glob, - collectedInputContext: context, + collectedOutputContext: context, + pluginOptions: userConfigOptions, dryRun: dry }) @@ -90,7 +89,7 @@ async function main(): Promise { fs, path, glob, - collectedInputContext: context, + collectedOutputContext: context, dryRun: dry, registeredPluginNames: [...outputPlugins].map(p => p.name) }) @@ -98,17 +97,34 @@ async function main(): Promise { const commandCtx: CommandContext = { logger, outputPlugins: [...outputPlugins], - collectedInputContext: context, + collectedOutputContext: context, userConfigOptions, createCleanContext, createWriteContext } - await command.execute(commandCtx) + const result = await command.execute(commandCtx) + if (!result.success) process.exit(1) +} + +function writeJsonFailure(errorMessage: string): void { + process.stdout.write(`${JSON.stringify({ + success: false, + filesAffected: 0, + dirsAffected: 0, + message: errorMessage, + pluginResults: [], + errors: [errorMessage] + })}\n`) } main().catch((e: unknown) => { const errorMessage = e instanceof Error ? 
e.message : String(e) + const {json} = parseRuntimeArgs(process.argv) + if (json) { + writeJsonFailure(errorMessage) + process.exit(1) + } const logger = createLogger('plugin-runtime', 'error') logger.error('unhandled error', {error: errorMessage}) process.exit(1) diff --git a/cli/src/plugin.config.ts b/cli/src/plugin.config.ts index 3e717f92..3a928460 100644 --- a/cli/src/plugin.config.ts +++ b/cli/src/plugin.config.ts @@ -1,3 +1,4 @@ +import process from 'node:process' import {GenericSkillsOutputPlugin} from '@truenine/plugin-agentskills-compact' import {AgentsOutputPlugin} from '@truenine/plugin-agentsmd' import {ClaudeCodeCLIOutputPlugin} from '@truenine/plugin-claude-code-cli' @@ -40,46 +41,49 @@ import { import {TraeCNIDEOutputPlugin} from '@/plugins/plugin-trae-cn-ide' export default defineConfig({ - plugins: [ - new AgentsOutputPlugin(), - new ClaudeCodeCLIOutputPlugin(), - new CodexCLIOutputPlugin(), - new JetBrainsAIAssistantCodexOutputPlugin(), - new DroidCLIOutputPlugin(), - new GeminiCLIOutputPlugin(), - new GenericSkillsOutputPlugin(), - new OpencodeCLIOutputPlugin(), - new QoderIDEPluginOutputPlugin(), - new TraeIDEOutputPlugin(), - new TraeCNIDEOutputPlugin(), - new WarpIDEOutputPlugin(), - new WindsurfOutputPlugin(), - new CursorOutputPlugin(), - new GitExcludeOutputPlugin(), + pipelineArgs: process.argv, + pluginOptions: { + plugins: [ + new AgentsOutputPlugin(), + new ClaudeCodeCLIOutputPlugin(), + new CodexCLIOutputPlugin(), + new JetBrainsAIAssistantCodexOutputPlugin(), + new DroidCLIOutputPlugin(), + new GeminiCLIOutputPlugin(), + new GenericSkillsOutputPlugin(), + new OpencodeCLIOutputPlugin(), + new QoderIDEPluginOutputPlugin(), + new TraeIDEOutputPlugin(), + new TraeCNIDEOutputPlugin(), + new WarpIDEOutputPlugin(), + new WindsurfOutputPlugin(), + new CursorOutputPlugin(), + new GitExcludeOutputPlugin(), - new JetBrainsIDECodeStyleConfigOutputPlugin(), - new EditorConfigOutputPlugin(), - new VisualStudioCodeIDEConfigOutputPlugin(), - new 
ReadmeMdConfigFileOutputPlugin(), + new JetBrainsIDECodeStyleConfigOutputPlugin(), + new EditorConfigOutputPlugin(), + new VisualStudioCodeIDEConfigOutputPlugin(), + new ReadmeMdConfigFileOutputPlugin(), - new SkillNonSrcFileSyncEffectInputPlugin(), // Effect Input Plugins (executed in priority order: 10, 20, 30) - new OrphanFileCleanupEffectInputPlugin(), - new MarkdownWhitespaceCleanupEffectInputPlugin(), + new SkillNonSrcFileSyncEffectInputPlugin(), // Effect Input Plugins (executed in priority order: 10, 20, 30) + new OrphanFileCleanupEffectInputPlugin(), + new MarkdownWhitespaceCleanupEffectInputPlugin(), - new WorkspaceInputPlugin(), - new AindexInputPlugin(), - new VSCodeConfigInputPlugin(), - new JetBrainsConfigInputPlugin(), - new EditorConfigInputPlugin(), - new SkillInputPlugin(), - new CommandInputPlugin(), - new SubAgentInputPlugin(), - new RuleInputPlugin(), - new GlobalMemoryInputPlugin(), - new ProjectPromptInputPlugin(), - new ReadmeMdInputPlugin(), - new GitIgnoreInputPlugin(), - new GitExcludeInputPlugin(), - new AIAgentIgnoreInputPlugin() - ] + new WorkspaceInputPlugin(), + new AindexInputPlugin(), + new VSCodeConfigInputPlugin(), + new JetBrainsConfigInputPlugin(), + new EditorConfigInputPlugin(), + new SkillInputPlugin(), + new CommandInputPlugin(), + new SubAgentInputPlugin(), + new RuleInputPlugin(), + new GlobalMemoryInputPlugin(), + new ProjectPromptInputPlugin(), + new ReadmeMdInputPlugin(), + new GitIgnoreInputPlugin(), + new GitExcludeInputPlugin(), + new AIAgentIgnoreInputPlugin() + ] + } }) diff --git a/cli/src/plugins/AgentsOutputPlugin.ts b/cli/src/plugins/AgentsOutputPlugin.ts new file mode 100644 index 00000000..b8f8631f --- /dev/null +++ b/cli/src/plugins/AgentsOutputPlugin.ts @@ -0,0 +1,84 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'AGENTS.md' + +export class AgentsOutputPlugin extends 
AbstractOutputPlugin { + constructor() { + super('AgentsOutputPlugin', { + outputFileName: PROJECT_MEMORY_FILE, + cleanup: { + delete: { + project: { + files: [PROJECT_MEMORY_FILE] + } + } + }, + capabilities: { + prompt: { + scopes: ['project'], + singleScope: false + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const results: OutputFileDeclaration[] = [] + const {projects} = ctx.collectedOutputContext.workspace + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project'])) + if (!activePromptScopes.has('project')) return results + + for (const [projectIndex, project] of projects.entries()) { + if (project.rootMemoryPrompt != null && project.dirFromWorkspacePath != null) { + results.push({ + path: this.resolveFullPath(project.dirFromWorkspacePath), + scope: 'project', + source: {type: 'projectRootMemory', projectIndex} + }) + } + + if (project.childMemoryPrompts != null) { + for (const [childIndex, child] of project.childMemoryPrompts.entries()) { + results.push({ + path: this.resolveFullPath(child.dir), + scope: 'project', + source: {type: 'projectChildMemory', projectIndex, childIndex} + }) + } + } + } + + return results + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + const {projects} = ctx.collectedOutputContext.workspace + const source = declaration.source as {type?: string, projectIndex?: number, childIndex?: number} + const projectIndex = source.projectIndex ?? 
-1 + if (projectIndex < 0 || projectIndex >= projects.length) throw new Error(`Invalid project index in declaration for ${this.name}`) + + const project = projects[projectIndex] + if (project == null) throw new Error(`Project not found for declaration in ${this.name}`) + + if (source.type === 'projectRootMemory') { + if (project.rootMemoryPrompt == null) throw new Error(`Root memory prompt missing for project index ${projectIndex}`) + return project.rootMemoryPrompt.content as string + } + + if (source.type === 'projectChildMemory') { + const childIndex = source.childIndex ?? -1 + const child = project.childMemoryPrompts?.[childIndex] + if (child == null) throw new Error(`Child memory prompt missing for project ${projectIndex}, child ${childIndex}`) + return child.content as string + } + + throw new Error(`Unsupported declaration source for ${this.name}`) + } +} diff --git a/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts b/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts new file mode 100644 index 00000000..01186796 --- /dev/null +++ b/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts @@ -0,0 +1,82 @@ +import type {RulePrompt} from './plugin-core' +import {doubleQuoted} from '@truenine/md-compiler/markdown' +import {AbstractOutputPlugin} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'CLAUDE.md' +const GLOBAL_CONFIG_DIR = '.claude' +const COMMANDS_SUBDIR = 'commands' +const AGENTS_SUBDIR = 'agents' +const SKILLS_SUBDIR = 'skills' + +/** + * Output plugin for Claude Code CLI. + * + * Outputs rules to `.claude/rules/` directory with frontmatter format. + * + * @see https://github.com/anthropics/claude-code/issues/26868 + * Known bug: Claude Code CLI has issues with `.claude/rules` directory handling. + * This may affect rule loading behavior in certain scenarios. 
+ */ +export class ClaudeCodeCLIOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('ClaudeCodeCLIOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: PROJECT_MEMORY_FILE, + toolPreset: 'claudeCode', + commands: { + subDir: COMMANDS_SUBDIR, + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + }, + subagents: { + subDir: AGENTS_SUBDIR, + sourceScopes: ['project'], + includePrefix: true, + linkSymbol: '-', + ext: '.md' + }, + skills: { + subDir: SKILLS_SUBDIR + }, + rules: { + transformFrontMatter: (rule: RulePrompt) => ({paths: rule.globs.map(doubleQuoted)}) + }, + cleanup: { + delete: { + project: { + files: [PROJECT_MEMORY_FILE], + dirs: ['.claude/rules', '.claude/commands', '.claude/agents', '.claude/skills'] + }, + workspace: { + dirs: ['.claude/rules', '.claude/commands', '.claude/agents', '.claude/skills'] + }, + global: { + files: ['.claude/CLAUDE.md'], + dirs: ['.claude/rules', '.claude/commands', '.claude/agents', '.claude/skills'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + rules: { + scopes: ['project', 'workspace', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + subagents: { + scopes: ['project'], + singleScope: true + }, + skills: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + } + } + }) + } +} diff --git a/cli/src/plugins/CodexCLIOutputPlugin.ts b/cli/src/plugins/CodexCLIOutputPlugin.ts new file mode 100644 index 00000000..4ed7c190 --- /dev/null +++ b/cli/src/plugins/CodexCLIOutputPlugin.ts @@ -0,0 +1,104 @@ +import type { + CommandPrompt, + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import * as path from 'node:path' +import {AbstractOutputPlugin, filterByProjectConfig, PLUGIN_NAMES} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'AGENTS.md' +const GLOBAL_CONFIG_DIR = '.codex' +const PROMPTS_SUBDIR = 
'prompts' + +type CodexOutputSource + = {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'command', readonly command: CommandPrompt} + +export class CodexCLIOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('CodexCLIOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: PROJECT_MEMORY_FILE, + commands: { + subDir: PROMPTS_SUBDIR, + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + }, + cleanup: { + delete: { + global: { + files: ['.codex/AGENTS.md'], + dirs: ['.codex/prompts'] + } + }, + protect: { + global: { + dirs: ['.codex/skills/.system'] + } + } + }, + dependsOn: [PLUGIN_NAMES.AgentsOutput], + capabilities: { + prompt: { + scopes: ['global'], + singleScope: false + }, + commands: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const {globalMemory, commands} = ctx.collectedOutputContext + const globalDir = this.getGlobalConfigDir() + const declarations: OutputFileDeclaration[] = [] + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['global'])) + + if (globalMemory != null && activePromptScopes.has('global')) { + declarations.push({ + path: path.join(globalDir, PROJECT_MEMORY_FILE), + scope: 'global', + source: { + kind: 'globalMemory', + content: globalMemory.content as string + } satisfies CodexOutputSource + }) + } + + if (commands == null || commands.length === 0) return declarations + + const projectConfig = this.resolvePromptSourceProjectConfig(ctx) + const transformOptions = this.getTransformOptionsFromContext(ctx) + const scopedCommands = this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, cmd => this.resolveCommandSourceScope(cmd), this.getTopicScopeOverride(ctx, 'commands')) + if (scopedCommands.items.length === 0) return declarations + + const filteredCommands = filterByProjectConfig(scopedCommands.items, 
projectConfig, 'commands') + for (const cmd of filteredCommands) { + const fileName = this.transformCommandName(cmd, transformOptions) + declarations.push({ + path: path.join(globalDir, PROMPTS_SUBDIR, fileName), + scope: 'global', + source: { + kind: 'command', + command: cmd + } satisfies CodexOutputSource + }) + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as CodexOutputSource + + if (source.kind === 'globalMemory') return source.content + if (source.kind === 'command') return this.buildCommandContent(source.command) + + throw new Error(`Unsupported declaration source for ${this.name}`) + } +} diff --git a/cli/src/plugins/CursorOutputPlugin.test.ts b/cli/src/plugins/CursorOutputPlugin.test.ts new file mode 100644 index 00000000..ca3aea32 --- /dev/null +++ b/cli/src/plugins/CursorOutputPlugin.test.ts @@ -0,0 +1,79 @@ +import type {OutputCleanContext} from './plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {CursorOutputPlugin} from './CursorOutputPlugin' +import {FilePathKind} from './plugin-core' + +class TestCursorOutputPlugin extends CursorOutputPlugin { + constructor(private readonly testHomeDir: string) { + super() + } + + protected override getHomeDir(): string { + return this.testHomeDir + } +} + +function createCleanContext(): OutputCleanContext { + return { + logger: { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} + }, + fs, + path, + glob, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Relative, + path: '.', + basePath: '.', + getDirectoryName: () => '.', + getAbsolutePath: () => path.resolve('.') + }, + projects: [] + } + } + } as OutputCleanContext +} + 
+describe('cursorOutputPlugin cleanup', () => { + it('expands skills cleanup glob into explicit stale targets while preserving built-in skills', async () => { + const tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cursor-cleanup-')) + const skillsDir = path.join(tempHomeDir, '.cursor', 'skills-cursor') + const preservedDir = path.join(skillsDir, 'create-rule') + const staleDir = path.join(skillsDir, 'legacy-skill') + + fs.mkdirSync(preservedDir, {recursive: true}) + fs.mkdirSync(staleDir, {recursive: true}) + fs.writeFileSync(path.join(preservedDir, 'SKILL.md'), '# preserved', 'utf8') + fs.writeFileSync(path.join(staleDir, 'SKILL.md'), '# stale', 'utf8') + + try { + const plugin = new TestCursorOutputPlugin(tempHomeDir) + const result = await plugin.declareCleanupPaths(createCleanContext()) + const deletePaths = result.delete?.map(target => target.path.replaceAll('\\', '/')) ?? [] + const protectPaths = result.protect?.map(target => target.path.replaceAll('\\', '/')) ?? [] + const normalizedCommandsDir = path.join(tempHomeDir, '.cursor', 'commands').replaceAll('\\', '/') + const normalizedStaleDir = staleDir.replaceAll('\\', '/') + const normalizedPreservedDir = preservedDir.replaceAll('\\', '/') + + expect(deletePaths).toContain(normalizedCommandsDir) + expect(deletePaths).toContain(normalizedStaleDir) + expect(result.delete?.some(target => target.kind === 'glob' && target.path.includes('skills-cursor'))).toBe(false) + expect(deletePaths).not.toContain(normalizedPreservedDir) + expect(protectPaths).toContain(normalizedPreservedDir) + } + finally { + fs.rmSync(tempHomeDir, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/plugins/CursorOutputPlugin.ts b/cli/src/plugins/CursorOutputPlugin.ts new file mode 100644 index 00000000..08cb7610 --- /dev/null +++ b/cli/src/plugins/CursorOutputPlugin.ts @@ -0,0 +1,386 @@ +import type { + CommandPrompt, + OutputCleanContext, + OutputCleanupDeclarations, + OutputCleanupPathDeclaration, + 
OutputFileDeclaration, + OutputWriteContext, + RulePrompt, + SkillPrompt +} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' +import { + AbstractOutputPlugin, + applySubSeriesGlobPrefix, + filterByProjectConfig, + GlobalConfigDirs, + IgnoreFiles, + McpConfigManager, + OutputFileNames, + OutputSubdirectories, + PLUGIN_NAMES, + PreservedSkills, + transformMcpConfigForCursor +} from './plugin-core' + +const GLOBAL_CONFIG_DIR = GlobalConfigDirs.CURSOR // Constants for local use (consider moving to constants.ts if used by multiple plugins) +const MCP_CONFIG_FILE = OutputFileNames.MCP_CONFIG +const COMMANDS_SUBDIR = OutputSubdirectories.COMMANDS +const RULES_SUBDIR = OutputSubdirectories.RULES +const GLOBAL_RULE_FILE = OutputFileNames.CURSOR_GLOBAL_RULE +const SKILLS_CURSOR_SUBDIR = OutputSubdirectories.CURSOR_SKILLS +const SKILL_FILE_NAME = OutputFileNames.SKILL +const PRESERVED_SKILLS = PreservedSkills.CURSOR + +type CursorOutputSource + = | {readonly kind: 'globalCommand', readonly command: CommandPrompt} + | {readonly kind: 'globalMcpConfig', readonly mcpServers: Record>} + | {readonly kind: 'globalSkill', readonly skill: SkillPrompt} + | {readonly kind: 'globalSkillMcpConfig', readonly rawContent: string} + | {readonly kind: 'globalSkillChildDoc', readonly content: string} + | {readonly kind: 'globalSkillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'projectGlobalRule', readonly content: string} + | {readonly kind: 'ruleMdc', readonly rule: RulePrompt} + | {readonly kind: 'projectIgnoreFile', readonly content: string} + +export class CursorOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('CursorOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: '', + dependsOn: [PLUGIN_NAMES.AgentsOutput], + indexignore: IgnoreFiles.CURSOR, + commands: { + subDir: 
COMMANDS_SUBDIR, + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + }, + skills: { + subDir: SKILLS_CURSOR_SUBDIR + }, + rules: { + subDir: RULES_SUBDIR, + prefix: 'rule', // Note: 'rule' not 'rule-' - linkSymbol adds the separator + sourceScopes: ['project', 'global'] + }, + cleanup: { + delete: { + project: { + dirs: ['.cursor/rules'] + }, + global: { + files: ['.cursor/mcp.json'], + dirs: ['.cursor/commands', '.cursor/rules'], + globs: ['.cursor/skills-cursor/*'] + } + }, + protect: { + global: { + dirs: [...PRESERVED_SKILLS].map(skillName => `.cursor/skills-cursor/${skillName}`) + } + } + }, + capabilities: { + prompt: { + scopes: ['global'], + singleScope: false + }, + rules: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + mcp: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + } + } + }) + } + + override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + const declarations = await super.declareCleanupPaths(ctx) + return { + ...declarations, + delete: this.expandCursorSkillCleanupTargets(ctx, declarations.delete ?? []) + } + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {workspace, globalMemory, commands, skills, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const globalDir = this.getGlobalConfigDir() + const projectConfig = this.resolvePromptSourceProjectConfig(ctx) + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['global'])) + + const scopedSkills = skills != null + ? 
this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) + : {items: [] as readonly SkillPrompt[]} + const filteredSkills = filterByProjectConfig(scopedSkills.items, projectConfig, 'skills') + const scopedMcpSkills = skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'mcp') ?? this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + const filteredMcpSkills = filterByProjectConfig(scopedMcpSkills.items, projectConfig, 'skills') + if (filteredSkills.length > 0) { + for (const skill of filteredSkills) { + const skillName = skill.yamlFrontMatter.name + if (this.isPreservedSkill(skillName)) continue + + const skillDir = path.join(globalDir, SKILLS_CURSOR_SUBDIR, skillName) + declarations.push({ + path: path.join(skillDir, SKILL_FILE_NAME), + scope: 'global', + source: {kind: 'globalSkill', skill} satisfies CursorOutputSource + }) + + if (skill.mcpConfig != null && filteredMcpSkills.includes(skill)) { + declarations.push({ + path: path.join(skillDir, MCP_CONFIG_FILE), + scope: 'global', + source: { + kind: 'globalSkillMcpConfig', + rawContent: skill.mcpConfig.rawContent + } satisfies CursorOutputSource + }) + } + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), + scope: 'global', + source: { + kind: 'globalSkillChildDoc', + content: childDoc.content as string + } satisfies CursorOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope: 'global', + source: { + kind: 'globalSkillResource', + content: resource.content, + encoding: resource.encoding + } satisfies 
CursorOutputSource + }) + } + } + } + } + + if (filteredMcpSkills.length > 0) { + const manager = new McpConfigManager({fs: ctx.fs, logger: this.log}) + const servers = manager.collectMcpServers(filteredMcpSkills) + if (servers.size > 0) { + const transformed = manager.transformMcpServers(servers, transformMcpConfigForCursor) + declarations.push({ + path: path.join(globalDir, MCP_CONFIG_FILE), + scope: 'global', + source: {kind: 'globalMcpConfig', mcpServers: transformed} satisfies CursorOutputSource + }) + } + } + + if (commands != null && commands.length > 0) { + const scopedCommands = this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, cmd => this.resolveCommandSourceScope(cmd), this.getTopicScopeOverride(ctx, 'commands')) + const filteredCommands = filterByProjectConfig(scopedCommands.items, projectConfig, 'commands') + const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) + for (const cmd of filteredCommands) { + const fileName = this.transformCommandName(cmd, transformOptions) + declarations.push({ + path: path.join(globalDir, COMMANDS_SUBDIR, fileName), + scope: 'global', + source: {kind: 'globalCommand', command: cmd} satisfies CursorOutputSource + }) + } + } + + const activeRuleScopes = new Set(rules != null ? 
this.selectRuleScopes(ctx, rules) : []) + + const globalRules = rules?.filter(r => this.normalizeSourceScope(this.normalizeRuleScope(r)) === 'global') + if (globalRules != null && activeRuleScopes.has('global')) { + for (const rule of globalRules) { + declarations.push({ + path: path.join(globalDir, RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'global', + source: {kind: 'ruleMdc', rule} satisfies CursorOutputSource + }) + } + } + + if (globalMemory != null && activePromptScopes.has('global')) { + const globalRuleContent = this.buildGlobalRuleContent(globalMemory.content as string) + for (const project of workspace.projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, GLOBAL_CONFIG_DIR, RULES_SUBDIR, GLOBAL_RULE_FILE), + scope: 'project', + source: { + kind: 'projectGlobalRule', + content: globalRuleContent + } satisfies CursorOutputSource + }) + } + } + + if (rules != null && rules.length > 0 && activeRuleScopes.has('project')) { + for (const project of workspace.projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + const projectRules = applySubSeriesGlobPrefix( + filterByProjectConfig(rules.filter(r => this.normalizeSourceScope(this.normalizeRuleScope(r)) === 'project'), project.projectConfig, 'rules'), + project.projectConfig + ) + for (const rule of projectRules) { + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, GLOBAL_CONFIG_DIR, RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'project', + source: {kind: 'ruleMdc', rule} satisfies CursorOutputSource + }) + } + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? 
void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of workspace.projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: { + kind: 'projectIgnoreFile', + content: ignoreFile.content + } satisfies CursorOutputSource + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as CursorOutputSource + switch (source.kind) { + case 'globalCommand': return this.buildCommandContent(source.command) + case 'globalMcpConfig': return JSON.stringify({mcpServers: source.mcpServers}, null, 2) + case 'globalSkill': { + const frontMatterData = this.buildSkillFrontMatter(source.skill) + return buildMarkdownWithFrontMatter(frontMatterData, source.skill.content as string) + } + case 'globalSkillMcpConfig': return source.rawContent + case 'globalSkillChildDoc': + case 'projectGlobalRule': + case 'projectIgnoreFile': return source.content + case 'globalSkillResource': return source.encoding === 'base64' ? 
Buffer.from(source.content, 'base64') : source.content + case 'ruleMdc': return this.buildRuleMdcContent(source.rule) + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + private buildGlobalRuleContent(content: string): string { + return buildMarkdownWithFrontMatter({description: 'Global prompt (synced)', alwaysApply: true}, content) + } + + private isPreservedSkill(name: string): boolean { return PRESERVED_SKILLS.has(name) } + + private expandCursorSkillCleanupTargets( + ctx: OutputCleanContext, + declarations: readonly OutputCleanupPathDeclaration[] + ): OutputCleanupPathDeclaration[] { + const expanded: OutputCleanupPathDeclaration[] = [] + + for (const declaration of declarations) { + if (!this.isCursorSkillCleanupGlob(declaration)) { + expanded.push(declaration) + continue + } + + for (const matchedTarget of this.listCursorSkillCleanupTargets(ctx, declaration.path)) { + expanded.push({ + path: matchedTarget.path, + kind: matchedTarget.kind, + ...declaration.scope != null ? {scope: declaration.scope} : {}, + ...declaration.label != null ? 
{label: declaration.label} : {} + }) + } + } + + return expanded + } + + private isCursorSkillCleanupGlob(declaration: OutputCleanupPathDeclaration): boolean { + if (declaration.kind !== 'glob') return false + + const skillsGlob = this.joinPath(this.getGlobalConfigDir(), SKILLS_CURSOR_SUBDIR, '*') + .replaceAll('\\', '/') + + return declaration.path.replaceAll('\\', '/') === skillsGlob + } + + private listCursorSkillCleanupTargets( + ctx: OutputCleanContext, + pattern: string + ): {path: string, kind: 'file' | 'directory'}[] { + const matchedPaths = ctx.glob.sync(pattern.replaceAll('\\', '/'), { + onlyFiles: false, + dot: true, + absolute: true, + followSymbolicLinks: false + }) + + return matchedPaths.flatMap((matchedPath): {path: string, kind: 'file' | 'directory'}[] => { + if (this.isPreservedSkill(path.basename(matchedPath))) return [] + + try { + const stat = ctx.fs.lstatSync(matchedPath) + return [{path: matchedPath, kind: stat.isDirectory() ? 'directory' : 'file'}] + } + catch { + return [] + } + }) + } + + protected buildRuleMdcContent(rule: RulePrompt): string { + const fmData: Record = {alwaysApply: false, globs: rule.globs.length > 0 ? rule.globs.join(', ') : ''} + const raw = buildMarkdownWithFrontMatter(fmData, rule.content) + const lines = raw.split('\n') + const transformedLines = lines.map(line => { + const match = /^(\s*globs:\s*)(['"])(.*)\2\s*$/.exec(line) + if (match == null) return line + const prefix = match[1] ?? 'globs: ' + const value = match[3] ?? 
'' + if (value.trim().length === 0) return line + return `${prefix}${value}` + }) + return transformedLines.join('\n') + } +} diff --git a/cli/src/plugins/DroidCLIOutputPlugin.ts b/cli/src/plugins/DroidCLIOutputPlugin.ts new file mode 100644 index 00000000..272c1005 --- /dev/null +++ b/cli/src/plugins/DroidCLIOutputPlugin.ts @@ -0,0 +1,57 @@ +import type { + SkillPrompt +} from './plugin-core' +import {AbstractOutputPlugin} from './plugin-core' + +const GLOBAL_MEMORY_FILE = 'AGENTS.md' +const GLOBAL_CONFIG_DIR = '.factory' + +export class DroidCLIOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('DroidCLIOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: GLOBAL_MEMORY_FILE, + commands: { + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + }, + skills: {}, + cleanup: { + delete: { + project: { + files: [GLOBAL_MEMORY_FILE], + dirs: ['.factory/commands', '.factory/skills'] + }, + workspace: { + dirs: ['.factory/commands', '.factory/skills'] + }, + global: { + files: ['.factory/AGENTS.md'], + dirs: ['.factory/commands', '.factory/skills'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + } + } + }) // Droid uses default subdir names + } + + protected override buildSkillMainContent(skill: SkillPrompt): string { // Droid-specific: Simplify front matter + const simplifiedFrontMatter = skill.yamlFrontMatter != null // Droid-specific: Simplify front matter + ? 
{name: skill.yamlFrontMatter.name, description: skill.yamlFrontMatter.description} + : void 0 + + return this.buildMarkdownContent(skill.content as string, simplifiedFrontMatter) + } +} diff --git a/cli/src/plugins/EditorConfigOutputPlugin.ts b/cli/src/plugins/EditorConfigOutputPlugin.ts new file mode 100644 index 00000000..b1a18913 --- /dev/null +++ b/cli/src/plugins/EditorConfigOutputPlugin.ts @@ -0,0 +1,58 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin} from './plugin-core' + +const EDITOR_CONFIG_FILE = '.editorconfig' + +/** + * Output plugin for writing .editorconfig files to project directories. + * Reads EditorConfig files collected by EditorConfigInputPlugin. + */ +export class EditorConfigOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('EditorConfigOutputPlugin', { + cleanup: { + delete: { + project: { + files: [EDITOR_CONFIG_FILE] + } + } + }, + capabilities: {} + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {projects} = ctx.collectedOutputContext.workspace + const {editorConfigFiles} = ctx.collectedOutputContext + + if (editorConfigFiles == null || editorConfigFiles.length === 0) return declarations + + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + for (const config of editorConfigFiles) { + declarations.push({ + path: this.resolvePath(projectDir.basePath, projectDir.path, EDITOR_CONFIG_FILE), + scope: 'project', + source: {content: config.content} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } +} diff --git 
a/cli/src/plugins/GeminiCLIOutputPlugin.ts b/cli/src/plugins/GeminiCLIOutputPlugin.ts new file mode 100644 index 00000000..60c09863 --- /dev/null +++ b/cli/src/plugins/GeminiCLIOutputPlugin.ts @@ -0,0 +1,29 @@ +import {AbstractOutputPlugin} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'GEMINI.md' +const GLOBAL_CONFIG_DIR = '.gemini' + +export class GeminiCLIOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('GeminiCLIOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: PROJECT_MEMORY_FILE, + cleanup: { + delete: { + project: { + globs: [PROJECT_MEMORY_FILE] + }, + global: { + files: ['.gemini/GEMINI.md'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + } + } + }) + } +} diff --git a/cli/src/plugins/GenericSkillsOutputPlugin.ts b/cli/src/plugins/GenericSkillsOutputPlugin.ts new file mode 100644 index 00000000..9efb1e91 --- /dev/null +++ b/cli/src/plugins/GenericSkillsOutputPlugin.ts @@ -0,0 +1,155 @@ +import type { + OutputFileDeclaration, + OutputWriteContext, + SkillPrompt +} from './plugin-core' + +import {Buffer} from 'node:buffer' +import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' +import {AbstractOutputPlugin, filterByProjectConfig} from './plugin-core' + +const PROJECT_SKILLS_DIR = '.agents/skills' +const SKILL_FILE_NAME = 'SKILL.md' +const MCP_CONFIG_FILE = 'mcp.json' + +type GenericSkillOutputSource + = {readonly kind: 'skillMain', readonly skill: SkillPrompt} + | {readonly kind: 'skillMcp', readonly rawContent: string} + | {readonly kind: 'skillChildDoc', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + +/** + * Output plugin that writes skills directly to each project's .agents/skills/ directory. 
+ * + * Structure: + * - Project: /.agents/skills//SKILL.md, mcp.json, child docs, resources + */ +export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('GenericSkillsOutputPlugin', { + outputFileName: SKILL_FILE_NAME, + skills: {}, + cleanup: { + delete: { + project: { + dirs: [PROJECT_SKILLS_DIR] + } + } + }, + capabilities: { + skills: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + mcp: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {projects} = ctx.collectedOutputContext.workspace + const {skills} = ctx.collectedOutputContext + + if (skills == null || skills.length === 0) return declarations + + const selectedSkills = this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'skills') + ) + const selectedMcpSkills = this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'mcp') ?? 
this.getTopicScopeOverride(ctx, 'skills') + ) + + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') + const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, project.projectConfig, 'skills') + if (filteredSkills.length === 0) continue + + const projectSkillsDir = this.joinPath( + projectDir.basePath, + projectDir.path, + PROJECT_SKILLS_DIR + ) + + for (const skill of filteredSkills) { + const skillName = skill.yamlFrontMatter.name + const skillDir = this.joinPath(projectSkillsDir, skillName) + + declarations.push({ + path: this.joinPath(skillDir, SKILL_FILE_NAME), + scope: 'project', + source: {kind: 'skillMain', skill} satisfies GenericSkillOutputSource + }) + + if (skill.mcpConfig != null && filteredMcpSkills.includes(skill)) { + declarations.push({ + path: this.joinPath(skillDir, MCP_CONFIG_FILE), + scope: 'project', + source: { + kind: 'skillMcp', + rawContent: skill.mcpConfig.rawContent + } satisfies GenericSkillOutputSource + }) + } + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: this.joinPath(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), + scope: 'project', + source: { + kind: 'skillChildDoc', + content: childDoc.content as string + } satisfies GenericSkillOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: this.joinPath(skillDir, resource.relativePath), + scope: 'project', + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies GenericSkillOutputSource + }) + } + } + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as 
GenericSkillOutputSource + switch (source.kind) { + case 'skillMain': { + const frontMatterData = this.buildSkillFrontMatter(source.skill) + return buildMarkdownWithFrontMatter(frontMatterData, source.skill.content as string) + } + case 'skillMcp': return source.rawContent + case 'skillChildDoc': return source.content + case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } +} diff --git a/cli/src/plugins/GitExcludeOutputPlugin.ts b/cli/src/plugins/GitExcludeOutputPlugin.ts new file mode 100644 index 00000000..46c7f62a --- /dev/null +++ b/cli/src/plugins/GitExcludeOutputPlugin.ts @@ -0,0 +1,132 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import * as path from 'node:path' +import {AbstractOutputPlugin, findAllGitRepos, findGitModuleInfoDirs, resolveGitInfoDir} from './plugin-core' + +export class GitExcludeOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('GitExcludeOutputPlugin', {capabilities: {}}) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {workspace, globalGitIgnore, shadowGitExclude} = ctx.collectedOutputContext + const managedContent = this.buildManagedContent(globalGitIgnore, shadowGitExclude) + if (managedContent.length === 0) return declarations + + const finalContent = this.normalizeContent(managedContent) + const writtenPaths = new Set() + const {projects} = workspace + + for (const project of projects) { + if (project.dirFromWorkspacePath == null) continue + + const projectDir = project.dirFromWorkspacePath.getAbsolutePath() + const gitRepoDirs = [projectDir, ...findAllGitRepos(projectDir)] + + for (const repoDir of gitRepoDirs) { + const gitInfoDir = resolveGitInfoDir(repoDir) + if (gitInfoDir == null) continue + + const excludePath = 
path.join(gitInfoDir, 'exclude') + if (writtenPaths.has(excludePath)) continue + writtenPaths.add(excludePath) + + declarations.push({ + path: excludePath, + scope: 'project', + source: {content: finalContent} + }) + } + } + + const workspaceDir = workspace.directory.path + const workspaceGitInfoDir = resolveGitInfoDir(workspaceDir) + if (workspaceGitInfoDir != null) { + const workspaceExcludePath = path.join(workspaceGitInfoDir, 'exclude') + if (!writtenPaths.has(workspaceExcludePath)) { + writtenPaths.add(workspaceExcludePath) + declarations.push({ + path: workspaceExcludePath, + scope: 'workspace', + source: {content: finalContent} + }) + } + } + + const workspaceNestedRepos = findAllGitRepos(workspaceDir) + for (const repoDir of workspaceNestedRepos) { + const gitInfoDir = resolveGitInfoDir(repoDir) + if (gitInfoDir == null) continue + + const excludePath = path.join(gitInfoDir, 'exclude') + if (writtenPaths.has(excludePath)) continue + writtenPaths.add(excludePath) + declarations.push({ + path: excludePath, + scope: 'workspace', + source: {content: finalContent} + }) + } + + const dotGitDir = path.join(workspaceDir, '.git') + if (this.existsSync(dotGitDir) && this.lstatSync(dotGitDir).isDirectory()) { + for (const moduleInfoDir of findGitModuleInfoDirs(dotGitDir)) { + const excludePath = path.join(moduleInfoDir, 'exclude') + if (writtenPaths.has(excludePath)) continue + writtenPaths.add(excludePath) + declarations.push({ + path: excludePath, + scope: 'workspace', + source: {content: finalContent} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } + + private buildManagedContent(globalGitIgnore?: string, shadowGitExclude?: string): string { + const parts: string[] = [] + + if 
(globalGitIgnore != null && globalGitIgnore.trim().length > 0) { // Handle globalGitIgnore first + const sanitized = this.sanitizeContent(globalGitIgnore) + if (sanitized.length > 0) parts.push(sanitized) + } + + if (shadowGitExclude != null && shadowGitExclude.trim().length > 0) { // Handle shadowGitExclude + const sanitized = this.sanitizeContent(shadowGitExclude) + if (sanitized.length > 0) parts.push(sanitized) + } + + if (parts.length === 0) return '' // Return early if no content was added + return parts.join('\n') + } + + private sanitizeContent(content: string): string { + const lines = content.split(/\r?\n/) + const filtered = lines.filter(line => { + const trimmed = line.trim() + if (trimmed.length === 0) return true + return !(trimmed.startsWith('#') && !trimmed.startsWith('\\#')) + }) + return filtered.join('\n').trim() + } + + private normalizeContent(content: string): string { + const trimmed = content.trim() + if (trimmed.length === 0) return '' + return `${trimmed}\n` + } +} diff --git a/cli/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts b/cli/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts new file mode 100644 index 00000000..1e38d124 --- /dev/null +++ b/cli/src/plugins/JetBrainsAIAssistantCodexOutputPlugin.ts @@ -0,0 +1,391 @@ +import type { + CommandPrompt, + OutputCleanContext, + OutputCleanupDeclarations, + OutputFileDeclaration, + OutputWriteContext, + ProjectChildrenMemoryPrompt, + SkillPrompt +} from './plugin-core' +import * as path from 'node:path' +import {getPlatformFixedDir} from '@truenine/desk-paths' +import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' +import {AbstractOutputPlugin, filterByProjectConfig, PLUGIN_NAMES} from './plugin-core' + +/** + * Represents the filename of the project memory file. + */ +const PROJECT_MEMORY_FILE = 'AGENTS.md' +/** + * Specifies the name of the subdirectory where prompt files are stored. 
+ */ +const PROMPTS_SUBDIR = 'prompts' +/** + * Represents the name of the subdirectory where skill-related resources are stored. + */ +const SKILLS_SUBDIR = 'skills' +/** + * The file name that represents the skill definition file. + */ +const SKILL_FILE_NAME = 'SKILL.md' +const AIASSISTANT_DIR = '.aiassistant' +const RULES_SUBDIR = 'rules' +const ROOT_RULE_FILE = 'always.md' +const CHILD_RULE_FILE_PREFIX = 'glob-' +const RULE_APPLY_ALWAYS = '\u59CB\u7EC8' +const RULE_APPLY_GLOB = '\u6309\u6587\u4EF6\u6A21\u5F0F' +const RULE_GLOB_KEY = '\u6A21\u5F0F' +/** + * Represents the directory name used for storing JetBrains-related resources or files. + */ +const JETBRAINS_VENDOR_DIR = 'JetBrains' +/** + * Represents the directory path where the AIA files are stored. + */ +const AIA_DIR = 'aia' +/** + * Represents the directory path where the Codex-related files are stored. + */ +const CODEX_DIR = 'codex' + +/** + * An array of constant string literals representing the prefixes of JetBrains IDE directory names. + */ +const IDE_DIR_PREFIXES = [ + 'IntelliJIdea', + 'WebStorm', + 'RustRover', + 'PyCharm', + 'PyCharmCE', + 'PhpStorm', + 'GoLand', + 'CLion', + 'DataGrip', + 'RubyMine', + 'Rider', + 'DataSpell', + 'Aqua' +] as const + +type JetBrainsCodexOutputSource + = | {readonly kind: 'projectRuleContent', readonly content: string} + | {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'globalSkill', readonly skill: SkillPrompt} + | {readonly kind: 'skillReference', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string} + | {readonly kind: 'ignoreFile', readonly content: string} + +/** + * Represents an output plugin specifically designed for integration with JetBrains AI Assistant Codex. 
+ */ +export class JetBrainsAIAssistantCodexOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('JetBrainsAIAssistantCodexOutputPlugin', { + outputFileName: PROJECT_MEMORY_FILE, + commands: { + subDir: PROMPTS_SUBDIR, + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + }, + skills: { + subDir: SKILLS_SUBDIR + }, + dependsOn: [PLUGIN_NAMES.AgentsOutput], + indexignore: '.aiignore', + cleanup: { + delete: { + project: { + dirs: ['.aiassistant/rules'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {workspace, globalMemory, commands, skills, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const {projects} = workspace + const codexDirs = this.resolveCodexDirs() + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) + + if (activePromptScopes.has('project')) { + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + const rulesDir = path.join(projectDir.basePath, projectDir.path, AIASSISTANT_DIR, RULES_SUBDIR) + + if (project.rootMemoryPrompt != null) { + declarations.push({ + path: path.join(rulesDir, ROOT_RULE_FILE), + scope: 'project', + source: { + kind: 'projectRuleContent', + content: this.buildAlwaysRuleContent(project.rootMemoryPrompt.content as string) + } satisfies JetBrainsCodexOutputSource + }) + } + + if (project.childMemoryPrompts != null) { + for (const child of project.childMemoryPrompts) { + declarations.push({ + path: path.join(rulesDir, this.buildChildRuleFileName(child)), + scope: 'project', + source: { + kind: 'projectRuleContent', + 
content: this.buildGlobRuleContent(child) + } satisfies JetBrainsCodexOutputSource + }) + } + } + } + } + + if (codexDirs.length > 0) { + const projectConfig = this.resolvePromptSourceProjectConfig(ctx) + const scopedCommands = commands != null + ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, cmd => this.resolveCommandSourceScope(cmd), this.getTopicScopeOverride(ctx, 'commands')) + : {items: [] as readonly CommandPrompt[]} + const filteredCommands = filterByProjectConfig(scopedCommands.items, projectConfig, 'commands') + const scopedSkills = skills != null + ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) + : {items: [] as readonly SkillPrompt[]} + const filteredSkills = filterByProjectConfig(scopedSkills.items, projectConfig, 'skills') + const transformOptions = this.getTransformOptionsFromContext(ctx) + + for (const codexDir of codexDirs) { + if (globalMemory != null && activePromptScopes.has('global')) { + declarations.push({ + path: path.join(codexDir, PROJECT_MEMORY_FILE), + scope: 'global', + source: { + kind: 'globalMemory', + content: globalMemory.content as string + } satisfies JetBrainsCodexOutputSource + }) + } + + for (const cmd of filteredCommands) { + declarations.push({ + path: path.join(codexDir, PROMPTS_SUBDIR, this.transformCommandName(cmd, transformOptions)), + scope: 'global', + source: {kind: 'command', command: cmd} satisfies JetBrainsCodexOutputSource + }) + } + + for (const skill of filteredSkills) { + const skillName = skill.yamlFrontMatter?.name ?? 
skill.dir.getDirectoryName() + const skillDir = path.join(codexDir, SKILLS_SUBDIR, skillName) + declarations.push({ + path: path.join(skillDir, SKILL_FILE_NAME), + scope: 'global', + source: {kind: 'globalSkill', skill} satisfies JetBrainsCodexOutputSource + }) + + if (skill.childDocs != null) { + for (const refDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, refDoc.dir.path.replace(/\.mdx$/, '.md')), + scope: 'global', + source: { + kind: 'skillReference', + content: refDoc.content as string + } satisfies JetBrainsCodexOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope: 'global', + source: { + kind: 'skillResource', + content: resource.content + } satisfies JetBrainsCodexOutputSource + }) + } + } + } + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: { + kind: 'ignoreFile', + content: ignoreFile.content + } satisfies JetBrainsCodexOutputSource + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as JetBrainsCodexOutputSource + switch (source.kind) { + case 'projectRuleContent': + case 'globalMemory': + case 'skillReference': + case 'skillResource': + case 'ignoreFile': return source.content + case 'command': return this.buildCommandContent(source.command) + case 'globalSkill': 
return this.buildCodexSkillContent(source.skill) + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + const baseDeclarations = await super.declareCleanupPaths(ctx) + const codexDirs = this.resolveCodexDirs() + if (codexDirs.length === 0) return baseDeclarations + + const dynamicGlobalDeletes = codexDirs.flatMap(codexDir => ([ + {path: path.join(codexDir, PROJECT_MEMORY_FILE), kind: 'file', scope: 'global'}, + {path: path.join(codexDir, PROMPTS_SUBDIR), kind: 'directory', scope: 'global'}, + {path: path.join(codexDir, SKILLS_SUBDIR), kind: 'directory', scope: 'global'} + ] as const)) + const baseDeletes = baseDeclarations.delete ?? [] + + return { + ...baseDeclarations, + delete: [ + ...baseDeletes, + ...dynamicGlobalDeletes + ] + } + } + + private resolveCodexDirs(): string[] { + const baseDir = path.join(getPlatformFixedDir(), JETBRAINS_VENDOR_DIR) + if (!this.existsSync(baseDir)) return [] + + try { + const dirents = this.readdirSync(baseDir, {withFileTypes: true}) + const ideDirs = dirents.filter(dirent => { + if (!dirent.isDirectory()) return false + return this.isSupportedIdeDir(dirent.name) + }) + return ideDirs.map(dirent => path.join(baseDir, dirent.name, AIA_DIR, CODEX_DIR)) + } + catch (error) { + const errMsg = error instanceof Error ? error.message : String(error) + this.log.warn({action: 'scan', type: 'jetbrains', path: baseDir, error: errMsg}) + return [] + } + } + + private buildChildRuleFileName(child: ProjectChildrenMemoryPrompt): string { + const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path + const normalizedPath = childPath + .replaceAll('\\', '/') + .replaceAll(/^\/+|\/+$/g, '') + .replaceAll('/', '-') + + const suffix = normalizedPath.length > 0 ? 
normalizedPath : 'root' + return `${CHILD_RULE_FILE_PREFIX}${suffix}.md` + } + + private buildChildRulePattern(child: ProjectChildrenMemoryPrompt): string { + const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path + const normalizedPath = childPath + .replaceAll('\\', '/') + .replaceAll(/^\/+|\/+$/g, '') + + if (normalizedPath.length === 0) return '**/*' + return `${normalizedPath}/**` + } + + private buildAlwaysRuleContent(content: string): string { + const fmData: Record = { + apply: RULE_APPLY_ALWAYS + } + + return buildMarkdownWithFrontMatter(fmData, content) + } + + private buildGlobRuleContent(child: ProjectChildrenMemoryPrompt): string { + const pattern = this.buildChildRulePattern(child) + const fmData: Record = { + apply: RULE_APPLY_GLOB, + [RULE_GLOB_KEY]: pattern + } + + return buildMarkdownWithFrontMatter(fmData, child.content as string) + } + + private isSupportedIdeDir(dirName: string): boolean { + return IDE_DIR_PREFIXES.some(prefix => dirName.startsWith(prefix)) + } + + private buildCodexSkillContent(skill: SkillPrompt): string { + const fm = skill.yamlFrontMatter + + const name = this.normalizeSkillName(fm.name, 64) + const description = this.normalizeToSingleLine(fm.description, 1024) + + const metadata: Record = {} + + if (fm.displayName != null) metadata['short-description'] = fm.displayName + if (fm.version != null) metadata['version'] = fm.version + if (fm.author != null) metadata['author'] = fm.author + if (fm.keywords != null && fm.keywords.length > 0) metadata['keywords'] = [...fm.keywords] + + const fmData: Record = { + name, + description + } + + if (Object.keys(metadata).length > 0) fmData['metadata'] = metadata + if (fm.allowTools != null && fm.allowTools.length > 0) fmData['allowed-tools'] = fm.allowTools.join(' ') + + return buildMarkdownWithFrontMatter(fmData, skill.content as string) + } + + private normalizeSkillName(name: string, maxLength: number): string { + let normalized = name + .toLowerCase() + 
.replaceAll(/[^a-z0-9-]/g, '-') + .replaceAll(/-+/g, '-') + .replaceAll(/^-+|-+$/g, '') + + if (normalized.length > maxLength) normalized = normalized.slice(0, maxLength).replace(/-+$/, '') + + return normalized + } + + private normalizeToSingleLine(text: string, maxLength: number): string { + const singleLine = text.replaceAll(/[\r\n]+/g, ' ').replaceAll(/\s+/g, ' ').trim() + if (singleLine.length > maxLength) return `${singleLine.slice(0, maxLength - 3)}...` + return singleLine + } +} diff --git a/cli/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts b/cli/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts new file mode 100644 index 00000000..ba01da57 --- /dev/null +++ b/cli/src/plugins/JetBrainsIDECodeStyleConfigOutputPlugin.ts @@ -0,0 +1,67 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin, IDEKind} from './plugin-core' + +const IDEA_DIR = '.idea' +const CODE_STYLES_DIR = 'codeStyles' + +export class JetBrainsIDECodeStyleConfigOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('JetBrainsIDECodeStyleConfigOutputPlugin', { + cleanup: { + delete: { + project: { + files: ['.editorconfig', '.idea/codeStyles/Project.xml', '.idea/codeStyles/codeStyleConfig.xml', '.idea/.gitignore'] + } + } + }, + capabilities: {} + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {projects} = ctx.collectedOutputContext.workspace + const {jetbrainsConfigFiles, editorConfigFiles} = ctx.collectedOutputContext + const jetbrainsConfigs = [...jetbrainsConfigFiles ?? [], ...editorConfigFiles ?? 
[]] + + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + for (const config of jetbrainsConfigs) { + const targetRelativePath = this.getTargetRelativePath(config) + declarations.push({ + path: this.resolvePath(projectDir.basePath, projectDir.path, targetRelativePath), + scope: 'project', + source: {content: config.content} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } + + private getTargetRelativePath(config: {type: IDEKind, dir: {path: string}}): string { + const sourcePath = config.dir.path + + if (config.type === IDEKind.EditorConfig) return '.editorconfig' + + if (config.type !== IDEKind.IntellijIDEA) return this.basename(sourcePath) + + const ideaIndex = sourcePath.indexOf(IDEA_DIR) + if (ideaIndex !== -1) return sourcePath.slice(Math.max(0, ideaIndex)) + return this.joinPath(IDEA_DIR, CODE_STYLES_DIR, this.basename(sourcePath)) + } +} diff --git a/cli/src/plugins/OpencodeCLIOutputPlugin.ts b/cli/src/plugins/OpencodeCLIOutputPlugin.ts new file mode 100644 index 00000000..d1262bae --- /dev/null +++ b/cli/src/plugins/OpencodeCLIOutputPlugin.ts @@ -0,0 +1,372 @@ +import type {CommandPrompt, OutputFileDeclaration, OutputWriteContext, SkillPrompt, SubAgentPrompt} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import { + AbstractOutputPlugin, + filterByProjectConfig, + McpConfigManager, + PLUGIN_NAMES, + transformMcpConfigForOpencode +} from './plugin-core' + +const GLOBAL_MEMORY_FILE = 'AGENTS.md' +const GLOBAL_CONFIG_DIR = '.config/opencode' +const OPENCODE_CONFIG_FILE = 'opencode.json' +const OPENCODE_RULES_PLUGIN_NAME = 'opencode-rules@latest' +const 
PROJECT_RULES_DIR = '.opencode' +const COMMANDS_SUBDIR = 'commands' +const AGENTS_SUBDIR = 'agents' + +type OpencodeOutputSource + = | {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'projectRootMemory', readonly content: string} + | {readonly kind: 'projectChildMemory', readonly content: string} + | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'subAgent', readonly agent: SubAgentPrompt} + | {readonly kind: 'skillMain', readonly skill: SkillPrompt, readonly normalizedSkillName: string} + | {readonly kind: 'skillReference', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'globalMcpConfig', readonly mcpServers: Record>} + +function transformOpencodeCommandFrontMatter( + _cmd: CommandPrompt, + context: { + readonly sourceFrontMatter?: Record + } +): Record { + const frontMatter: Record = {} + const source = context.sourceFrontMatter + + if (source?.['description'] != null) frontMatter['description'] = source['description'] + if (source?.['agent'] != null) frontMatter['agent'] = source['agent'] + if (source?.['model'] != null) frontMatter['model'] = source['model'] + + if (source?.['allowTools'] != null && Array.isArray(source['allowTools'])) { + const tools: Record = {} + for (const tool of source['allowTools']) tools[String(tool)] = true + frontMatter['tools'] = tools + } + + for (const [key, value] of Object.entries(source ?? {})) { + if (!['description', 'agent', 'model', 'allowTools', 'namingCase', 'argumentHint'].includes(key)) frontMatter[key] = value + } + + return frontMatter +} + +/** + * Opencode CLI output plugin. 
+ * Outputs global memory, commands, agents, and skills to ~/.config/opencode/ + */ +export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('OpencodeCLIOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: GLOBAL_MEMORY_FILE, + commands: { + subDir: COMMANDS_SUBDIR, + transformFrontMatter: transformOpencodeCommandFrontMatter + }, + subagents: { + subDir: AGENTS_SUBDIR + }, + skills: { + subDir: 'skills' + }, + cleanup: { + delete: { + project: { + files: [GLOBAL_MEMORY_FILE], + dirs: ['.opencode/commands', '.opencode/agents', '.opencode/skills'] + }, + global: { + files: ['.config/opencode/AGENTS.md', '.config/opencode/opencode.json'], + dirs: ['.config/opencode/commands', '.config/opencode/agents', '.config/opencode/skills'] + }, + xdgConfig: { + files: ['opencode/AGENTS.md', 'opencode/opencode.json'], + dirs: ['opencode/commands', 'opencode/agents', 'opencode/skills'] + } + } + }, + dependsOn: [PLUGIN_NAMES.AgentsOutput], + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + subagents: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + mcp: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {workspace, globalMemory, commands, subAgents, skills} = ctx.collectedOutputContext + const globalDir = this.getGlobalConfigDir() + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) + const selectedCommands = commands != null + ? 
this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, cmd => this.resolveCommandSourceScope(cmd), this.getTopicScopeOverride(ctx, 'commands')) + : {items: [] as readonly CommandPrompt[]} + const selectedSubAgents = subAgents != null + ? this.selectSingleScopeItems(subAgents, this.subAgentsConfig.sourceScopes, subAgent => this.resolveSubAgentSourceScope(subAgent), this.getTopicScopeOverride(ctx, 'subagents')) + : {items: [] as readonly SubAgentPrompt[]} + const selectedSkills = skills != null + ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) + : {items: [] as readonly SkillPrompt[]} + const selectedMcpSkills = skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'mcp') ?? this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + + if (globalMemory != null && activePromptScopes.has('global')) { + declarations.push({ + path: path.join(globalDir, GLOBAL_MEMORY_FILE), + scope: 'global', + source: { + kind: 'globalMemory', + content: globalMemory.content as string + } satisfies OpencodeOutputSource + }) + } + + if (selectedMcpSkills.items.length > 0) { + const projectConfig = this.resolvePromptSourceProjectConfig(ctx) + const filteredSkills = filterByProjectConfig(selectedMcpSkills.items, projectConfig, 'skills') + const manager = new McpConfigManager({fs: ctx.fs, logger: this.log}) + const servers = manager.collectMcpServers(filteredSkills) + if (servers.size > 0) { + declarations.push({ + path: path.join(globalDir, OPENCODE_CONFIG_FILE), + scope: 'global', + source: { + kind: 'globalMcpConfig', + mcpServers: manager.transformMcpServers(servers, transformMcpConfigForOpencode) + } satisfies OpencodeOutputSource + }) + } + } + + const transformOptions = this.getTransformOptionsFromContext(ctx, 
{includeSeriesPrefix: true}) + for (const project of workspace.projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + const basePath = path.join(projectDir.basePath, projectDir.path, PROJECT_RULES_DIR) + + if (project.rootMemoryPrompt != null && activePromptScopes.has('project')) { + declarations.push({ + path: this.resolveFullPath(projectDir), + scope: 'project', + source: { + kind: 'projectRootMemory', + content: project.rootMemoryPrompt.content as string + } satisfies OpencodeOutputSource + }) + } + + if (project.childMemoryPrompts != null && activePromptScopes.has('project')) { + for (const child of project.childMemoryPrompts) { + declarations.push({ + path: this.resolveFullPath(child.dir), + scope: 'project', + source: { + kind: 'projectChildMemory', + content: child.content as string + } satisfies OpencodeOutputSource + }) + } + } + + if (this.commandOutputEnabled && selectedCommands.items.length > 0) { + const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') + for (const cmd of filteredCommands) { + declarations.push({ + path: path.join(basePath, this.commandsConfig.subDir, this.transformCommandName(cmd, transformOptions)), + scope: 'project', + source: {kind: 'command', command: cmd} satisfies OpencodeOutputSource + }) + } + } + + if (this.subAgentOutputEnabled && selectedSubAgents.items.length > 0) { + const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, project.projectConfig, 'subAgents') + const {subDir} = this.subAgentsConfig + for (const agent of filteredSubAgents) { + declarations.push({ + path: path.join(basePath, subDir, this.transformSubAgentName(agent)), + scope: 'project', + source: {kind: 'subAgent', agent} satisfies OpencodeOutputSource + }) + } + } + + if (this.skillOutputEnabled && selectedSkills.items.length > 0) { + const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') + for (const 
skill of filteredSkills) { + const normalizedSkillName = this.validateAndNormalizeSkillName((skill.yamlFrontMatter?.name as string | undefined) ?? skill.dir.getDirectoryName()) + const skillDir = path.join(basePath, this.skillsConfig.subDir, normalizedSkillName) + + declarations.push({ + path: path.join(skillDir, 'SKILL.md'), + scope: 'project', + source: { + kind: 'skillMain', + skill, + normalizedSkillName + } satisfies OpencodeOutputSource + }) + + if (skill.childDocs != null) { + for (const refDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, refDoc.dir.path.replace(/\.mdx$/, '.md')), + scope: 'project', + source: { + kind: 'skillReference', + content: refDoc.content as string + } satisfies OpencodeOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope: 'project', + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies OpencodeOutputSource + }) + } + } + } + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as OpencodeOutputSource + switch (source.kind) { + case 'globalMemory': + case 'projectRootMemory': + case 'projectChildMemory': + case 'skillReference': return source.content + case 'command': return this.buildCommandContent(source.command) + case 'subAgent': { + const frontMatter = this.buildOpencodeAgentFrontMatter(source.agent) + return this.buildMarkdownContent(source.agent.content, frontMatter) + } + case 'skillMain': { + const frontMatter = this.buildOpencodeSkillFrontMatter(source.skill, source.normalizedSkillName) + return this.buildMarkdownContent(source.skill.content as string, frontMatter) + } + case 'skillResource': return source.encoding === 'base64' ? 
Buffer.from(source.content, 'base64') : source.content + case 'globalMcpConfig': + return JSON.stringify({ + $schema: 'https://opencode.ai/config.json', + plugin: [OPENCODE_RULES_PLUGIN_NAME], + mcp: source.mcpServers + }, null, 2) + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + private buildOpencodeAgentFrontMatter(agent: SubAgentPrompt): Record { + const frontMatter: Record = {} + const source = agent.yamlFrontMatter as Record | undefined + + if (source?.['description'] != null) frontMatter['description'] = source['description'] + + frontMatter['mode'] = source?.['mode'] ?? 'subagent' + + if (source?.['model'] != null) frontMatter['model'] = source['model'] + if (source?.['temperature'] != null) frontMatter['temperature'] = source['temperature'] + if (source?.['maxSteps'] != null) frontMatter['maxSteps'] = source['maxSteps'] + if (source?.['hidden'] != null) frontMatter['hidden'] = source['hidden'] + + if (source?.['allowTools'] != null && Array.isArray(source['allowTools'])) { + const tools: Record = {} + for (const tool of source['allowTools']) tools[String(tool)] = true + frontMatter['tools'] = tools + } + + if (source?.['permission'] != null && typeof source['permission'] === 'object') frontMatter['permission'] = source['permission'] + + for (const [key, value] of Object.entries(source ?? {})) { + if (!['description', 'mode', 'model', 'temperature', 'maxSteps', 'hidden', 'allowTools', 'permission', 'namingCase', 'name', 'color'].includes(key)) { + frontMatter[key] = value + } + } + + return frontMatter + } + + private buildOpencodeSkillFrontMatter(skill: SkillPrompt, skillName: string): Record { + const frontMatter: Record = {} + const source = skill.yamlFrontMatter as Record | undefined + + frontMatter['name'] = skillName + if (source?.['description'] != null) frontMatter['description'] = source['description'] + + frontMatter['license'] = source?.['license'] ?? 
'MIT' + frontMatter['compatibility'] = source?.['compatibility'] ?? 'opencode' + + const metadata: Record = {} + const metadataFields = ['author', 'version', 'keywords', 'category', 'repository', 'displayName'] + + for (const field of metadataFields) { + if (source?.[field] != null) metadata[field] = source[field] + } + + const reservedFields = new Set(['name', 'description', 'license', 'compatibility', 'namingCase', 'allowTools', 'keywords', 'displayName', 'author', 'version']) + for (const [key, value] of Object.entries(source ?? {})) { + if (!reservedFields.has(key)) metadata[key] = value + } + + if (Object.keys(metadata).length > 0) frontMatter['metadata'] = metadata + + return frontMatter + } + + private validateAndNormalizeSkillName(name: string): string { + let normalized = name.toLowerCase() + normalized = normalized.replaceAll(/[^a-z0-9-]+/g, '-') + normalized = normalized.replaceAll(/-+/g, '-') + normalized = normalized.replaceAll(/^-|-$/g, '') + + if (normalized.length === 0) normalized = 'skill' + else if (normalized.length > 64) { + normalized = normalized.slice(0, 64) + normalized = normalized.replace(/-$/, '') + } + + return normalized + } +} diff --git a/cli/src/plugins/QoderIDEPluginOutputPlugin.ts b/cli/src/plugins/QoderIDEPluginOutputPlugin.ts new file mode 100644 index 00000000..af5b676d --- /dev/null +++ b/cli/src/plugins/QoderIDEPluginOutputPlugin.ts @@ -0,0 +1,347 @@ +import type { + CommandPrompt, + OutputFileDeclaration, + OutputWriteContext, + ProjectChildrenMemoryPrompt, + RulePrompt, + RuleScope, + SkillPrompt +} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' +import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterByProjectConfig} from './plugin-core' + +const QODER_CONFIG_DIR = '.qoder' +const RULES_SUBDIR = 'rules' +const COMMANDS_SUBDIR = 'commands' +const SKILLS_SUBDIR = 'skills' +const GLOBAL_RULE_FILE = 
'global.md' +const PROJECT_RULE_FILE = 'always.md' +const CHILD_RULE_FILE_PREFIX = 'glob-' +const SKILL_FILE_NAME = 'SKILL.md' +const MCP_CONFIG_FILE = 'mcp.json' +const TRIGGER_ALWAYS = 'always_on' +const TRIGGER_GLOB = 'glob' +const RULE_GLOB_KEY = 'glob' +const RULE_FILE_PREFIX = 'rule-' + +type QoderOutputSource + = | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'ruleContent', readonly content: string} + | {readonly kind: 'rulePrompt', readonly rule: RulePrompt} + | {readonly kind: 'skillMain', readonly skill: SkillPrompt} + | {readonly kind: 'skillMcpConfig', readonly rawContent: string} + | {readonly kind: 'skillChildDoc', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'ignoreFile', readonly content: string} + +function transformQoderCommandFrontMatter( + _cmd: CommandPrompt, + context: { + readonly sourceFrontMatter?: Record + } +): Record { + const source = context.sourceFrontMatter + + const frontMatter: Record = { + description: 'Fast command', + type: 'user_command' + } + + if (source?.['description'] != null) frontMatter['description'] = source['description'] + if (source?.['argumentHint'] != null) frontMatter['argumentHint'] = source['argumentHint'] + if (source?.['allowTools'] != null && Array.isArray(source['allowTools']) && source['allowTools'].length > 0) frontMatter['allowTools'] = source['allowTools'] + + return frontMatter +} + +export class QoderIDEPluginOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('QoderIDEPluginOutputPlugin', { + globalConfigDir: QODER_CONFIG_DIR, + indexignore: '.qoderignore', + commands: { + subDir: COMMANDS_SUBDIR, + transformFrontMatter: transformQoderCommandFrontMatter + }, + skills: { + subDir: SKILLS_SUBDIR + }, + rules: { + subDir: RULES_SUBDIR, + sourceScopes: ['project', 'global'] + }, + cleanup: { + delete: { + project: { + dirs: ['.qoder/rules'] + }, + 
global: { + dirs: ['.qoder/commands', '.qoder/rules', '.qoder/skills'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + rules: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + mcp: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {workspace, globalMemory, commands, skills, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const {projects} = workspace + const globalDir = this.getGlobalConfigDir() + const projectConfig = this.resolvePromptSourceProjectConfig(ctx) + const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) + const activeRuleScopes = new Set(rules != null ? 
this.selectRuleScopes(ctx, rules) : []) + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) + + if (commands != null && commands.length > 0) { + const scopedCommands = this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, cmd => this.resolveCommandSourceScope(cmd), this.getTopicScopeOverride(ctx, 'commands')) + const filteredCommands = filterByProjectConfig(scopedCommands.items, projectConfig, 'commands') + for (const cmd of filteredCommands) { + declarations.push({ + path: path.join(globalDir, COMMANDS_SUBDIR, this.transformCommandName(cmd, transformOptions)), + scope: 'global', + source: {kind: 'command', command: cmd} satisfies QoderOutputSource + }) + } + } + + if (rules != null && rules.length > 0 && activeRuleScopes.has('global')) { + const globalRules = rules.filter(r => this.normalizeSourceScope(this.normalizeRuleScope(r)) === 'global') + for (const rule of globalRules) { + declarations.push({ + path: path.join(globalDir, RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'global', + source: {kind: 'rulePrompt', rule} satisfies QoderOutputSource + }) + } + } + + if (skills != null && skills.length > 0) { + const scopedSkills = this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) + const filteredSkills = filterByProjectConfig(scopedSkills.items, projectConfig, 'skills') + const scopedMcpSkills = this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'mcp') ?? 
this.getTopicScopeOverride(ctx, 'skills') + ) + const filteredMcpSkills = filterByProjectConfig(scopedMcpSkills.items, projectConfig, 'skills') + for (const skill of filteredSkills) { + const skillName = skill.yamlFrontMatter.name + const skillDir = path.join(globalDir, SKILLS_SUBDIR, skillName) + declarations.push({ + path: path.join(skillDir, SKILL_FILE_NAME), + scope: 'global', + source: {kind: 'skillMain', skill} satisfies QoderOutputSource + }) + + if (skill.mcpConfig != null && filteredMcpSkills.includes(skill)) { + declarations.push({ + path: path.join(skillDir, MCP_CONFIG_FILE), + scope: 'global', + source: { + kind: 'skillMcpConfig', + rawContent: skill.mcpConfig.rawContent + } satisfies QoderOutputSource + }) + } + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), + scope: 'global', + source: { + kind: 'skillChildDoc', + content: childDoc.content as string + } satisfies QoderOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope: 'global', + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies QoderOutputSource + }) + } + } + } + } + + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + const projectRulesDir = path.join(projectDir.basePath, projectDir.path, QODER_CONFIG_DIR, RULES_SUBDIR) + + if (globalMemory != null && activePromptScopes.has('global')) { + declarations.push({ + path: path.join(projectRulesDir, GLOBAL_RULE_FILE), + scope: 'project', + source: { + kind: 'ruleContent', + content: this.buildAlwaysRuleContent(globalMemory.content as string) + } satisfies QoderOutputSource + }) + } + + if (project.rootMemoryPrompt != null && activePromptScopes.has('project')) { + 
declarations.push({ + path: path.join(projectRulesDir, PROJECT_RULE_FILE), + scope: 'project', + source: { + kind: 'ruleContent', + content: this.buildAlwaysRuleContent(project.rootMemoryPrompt.content as string) + } satisfies QoderOutputSource + }) + } + + if (project.childMemoryPrompts != null && activePromptScopes.has('project')) { + for (const child of project.childMemoryPrompts) { + declarations.push({ + path: path.join(projectRulesDir, this.buildChildRuleFileName(child)), + scope: 'project', + source: { + kind: 'ruleContent', + content: this.buildGlobRuleContent(child) + } satisfies QoderOutputSource + }) + } + } + + if (rules != null && rules.length > 0 && activeRuleScopes.has('project')) { + const projectRules = applySubSeriesGlobPrefix( + filterByProjectConfig(rules.filter(r => this.normalizeSourceScope(this.normalizeRuleScope(r)) === 'project'), project.projectConfig, 'rules'), + project.projectConfig + ) + for (const rule of projectRules) { + declarations.push({ + path: path.join(projectRulesDir, this.buildRuleFileName(rule)), + scope: 'project', + source: {kind: 'rulePrompt', rule} satisfies QoderOutputSource + }) + } + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? 
void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: { + kind: 'ignoreFile', + content: ignoreFile.content + } satisfies QoderOutputSource + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as QoderOutputSource + switch (source.kind) { + case 'command': return this.buildCommandContent(source.command) + case 'ruleContent': return source.content + case 'rulePrompt': return this.buildRuleContent(source.rule) + case 'skillMain': { + const fmData = this.buildSkillFrontMatter(source.skill) + return buildMarkdownWithFrontMatter(fmData, source.skill.content as string) + } + case 'skillMcpConfig': return source.rawContent + case 'skillChildDoc': + case 'ignoreFile': return source.content + case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + private buildChildRuleFileName(child: ProjectChildrenMemoryPrompt): string { + const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path + const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '').replaceAll('/', '-') + return `${CHILD_RULE_FILE_PREFIX}${normalized.length > 0 ? 
normalized : 'root'}.md` + } + + private buildAlwaysRuleContent(content: string): string { + return buildMarkdownWithFrontMatter({trigger: TRIGGER_ALWAYS, type: 'user_command'}, content) + } + + private buildGlobRuleContent(child: ProjectChildrenMemoryPrompt): string { + const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path + const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '') + const pattern = normalized.length === 0 ? '**/*' : `${normalized}/**` + return buildMarkdownWithFrontMatter({trigger: TRIGGER_GLOB, [RULE_GLOB_KEY]: pattern, type: 'user_command'}, child.content as string) + } + + protected override buildSkillFrontMatter(skill: SkillPrompt): Record { + const fm = skill.yamlFrontMatter + return { + name: fm.name, + description: fm.description, + type: 'user_command', + ...fm.displayName != null && {displayName: fm.displayName}, + ...fm.keywords != null && fm.keywords.length > 0 && {keywords: fm.keywords}, + ...fm.author != null && {author: fm.author}, + ...fm.version != null && {version: fm.version}, + ...fm.allowTools != null && fm.allowTools.length > 0 && {allowTools: fm.allowTools} + } + } + + protected override buildRuleFileName(rule: RulePrompt, prefix: string = RULE_FILE_PREFIX): string { + return `${prefix}${rule.prefix}-${rule.ruleName}.md` + } + + protected override buildRuleContent(rule: RulePrompt): string { + const fmData: Record = { + trigger: TRIGGER_GLOB, + [RULE_GLOB_KEY]: rule.globs.length > 0 ? rule.globs.join(', ') : '**/*', + type: 'user_command' + } + return buildMarkdownWithFrontMatter(fmData, rule.content) + } + + protected override normalizeRuleScope(rule: RulePrompt): RuleScope { + return rule.scope ?? 
'global' + } +} diff --git a/cli/src/plugins/ReadmeMdConfigFileOutputPlugin.ts b/cli/src/plugins/ReadmeMdConfigFileOutputPlugin.ts new file mode 100644 index 00000000..7a7bd4cd --- /dev/null +++ b/cli/src/plugins/ReadmeMdConfigFileOutputPlugin.ts @@ -0,0 +1,71 @@ +import type { + OutputFileDeclaration, + OutputWriteContext, + ReadmeFileKind +} from './plugin-core' + +import * as path from 'node:path' +import {AbstractOutputPlugin, README_FILE_KIND_MAP} from './plugin-core' + +function resolveOutputFileName(fileKind?: ReadmeFileKind): string { + return README_FILE_KIND_MAP[fileKind ?? 'Readme'].out +} + +/** + * Output plugin for writing readme-family files to project directories. + * Reads README prompts collected by ReadmeMdInputPlugin and writes them + * to the corresponding project directories. + * + * Output mapping: + * - fileKind=Readme → README.md + * - fileKind=CodeOfConduct → CODE_OF_CONDUCT.md + * - fileKind=Security → SECURITY.md + * + * Supports: + * - Root files (written to project root) + * - Child files (written to project subdirectories) + * - Dry-run mode (preview without writing) + * - Clean operation (delete generated files) + */ +export class ReadmeMdConfigFileOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('ReadmeMdConfigFileOutputPlugin', { + outputFileName: 'README.md', + cleanup: { + delete: { + project: { + files: ['README.md', 'CODE_OF_CONDUCT.md', 'SECURITY.md'] + } + } + }, + capabilities: {} + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {readmePrompts} = ctx.collectedOutputContext + if (readmePrompts == null || readmePrompts.length === 0) return declarations + + for (const readme of readmePrompts) { + const outputFileName = resolveOutputFileName(readme.fileKind) + const filePath = path.join(readme.targetDir.basePath, readme.targetDir.path, outputFileName) + declarations.push({ + path: filePath, + scope: 'project', + 
source: {content: readme.content as string} + }) + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } +} diff --git a/cli/src/plugins/TraeCNIDEOutputPlugin.ts b/cli/src/plugins/TraeCNIDEOutputPlugin.ts new file mode 100644 index 00000000..353bf1ec --- /dev/null +++ b/cli/src/plugins/TraeCNIDEOutputPlugin.ts @@ -0,0 +1,59 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin} from './plugin-core' + +const GLOBAL_MEMORY_FILE = 'GLOBAL.md' +const GLOBAL_CONFIG_DIR = '.trae-cn' +const USER_RULES_SUBDIR = 'user_rules' + +export class TraeCNIDEOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('TraeCNIDEOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: GLOBAL_MEMORY_FILE, + dependsOn: ['TraeIDEOutputPlugin'], + cleanup: { + delete: { + global: { + dirs: ['.trae-cn/user_rules'] + } + } + }, + capabilities: { + prompt: { + scopes: ['global'], + singleScope: false + } + } + }) + } + + private getGlobalUserRulesDir(): string { + return this.joinPath(this.getGlobalConfigDir(), USER_RULES_SUBDIR) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['global'])) + if (!activePromptScopes.has('global')) return [] + + const {globalMemory} = ctx.collectedOutputContext + if (globalMemory == null) return [] + + return [{ + path: this.joinPath(this.getGlobalUserRulesDir(), GLOBAL_MEMORY_FILE), + scope: 'global', + source: {content: globalMemory.content as string} + }] + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = 
declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } +} diff --git a/cli/src/plugins/TraeIDEOutputPlugin.test.ts b/cli/src/plugins/TraeIDEOutputPlugin.test.ts new file mode 100644 index 00000000..323ba9de --- /dev/null +++ b/cli/src/plugins/TraeIDEOutputPlugin.test.ts @@ -0,0 +1,75 @@ +import type {OutputWriteContext, ProjectChildrenMemoryPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' +import {TraeIDEOutputPlugin} from './TraeIDEOutputPlugin' + +function createChildPrompt(relativePath: string, content: string): ProjectChildrenMemoryPrompt { + return { + type: PromptKind.ProjectChildrenMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + markdownContents: [], + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.resolve('tmp/dist/app'), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.resolve('tmp/dist/app', relativePath) + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.resolve('tmp/workspace/project'), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.resolve('tmp/workspace/project', relativePath) + } + } as ProjectChildrenMemoryPrompt +} + +describe('traeIDEOutputPlugin steering rule output', () => { + it('emits project-relative glob and injects output-dir scope guard', async () => { + const plugin = new TraeIDEOutputPlugin() + const workspaceBase = path.resolve('tmp/trae-plugin-test') + const ctx = { + logger: createLogger('TraeIDEOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + 
path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + { + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceBase, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceBase, 'project-a') + }, + childMemoryPrompts: [createChildPrompt('commands', 'Rule body')] + } + ] + } + } + } as OutputWriteContext + + const declarations = await plugin.declareOutputFiles(ctx) + const steering = declarations.find(d => d.source != null && (d.source as {kind?: string}).kind === 'steeringRule') + expect(steering).toBeDefined() + + const {content} = steering!.source as {content: string} + expect(content).toContain('globs: commands/**') + expect(content).toContain('Scope guard: this rule is for the project-root path "commands/" only.') + expect(content).toContain('Do not apply this rule to generated output paths such as "dist/commands/"') + }) +}) diff --git a/cli/src/plugins/TraeIDEOutputPlugin.ts b/cli/src/plugins/TraeIDEOutputPlugin.ts new file mode 100644 index 00000000..26d2e200 --- /dev/null +++ b/cli/src/plugins/TraeIDEOutputPlugin.ts @@ -0,0 +1,260 @@ +import type { + CommandPrompt, + OutputFileDeclaration, + OutputWriteContext, + ProjectChildrenMemoryPrompt, + SkillPrompt +} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' +import {AbstractOutputPlugin, filterByProjectConfig} from './plugin-core' + +const GLOBAL_MEMORY_FILE = 'GLOBAL.md' +const GLOBAL_CONFIG_DIR = '.trae' +const STEERING_SUBDIR = 'steering' +const RULES_SUBDIR = 'rules' +const COMMANDS_SUBDIR = 'commands' +const SKILLS_SUBDIR = 'skills' +const SKILL_FILE_NAME = 'SKILL.md' + +type TraeOutputSource + = | {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'steeringRule', readonly content: string} + | {readonly kind: 'command', 
readonly command: CommandPrompt} + | {readonly kind: 'skillMain', readonly skill: SkillPrompt} + | {readonly kind: 'skillChildDoc', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'ignoreFile', readonly content: string} + +export class TraeIDEOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('TraeIDEOutputPlugin', { + globalConfigDir: GLOBAL_CONFIG_DIR, + outputFileName: GLOBAL_MEMORY_FILE, + indexignore: '.traeignore', + commands: { + subDir: COMMANDS_SUBDIR, + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + }, + skills: { + subDir: SKILLS_SUBDIR + }, + cleanup: { + delete: { + project: { + dirs: ['.trae/rules', '.trae/commands', '.trae/skills'] + }, + workspace: { + dirs: ['.trae/commands', '.trae/skills'] + }, + global: { + dirs: ['.trae/steering'] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + } + } + }) + } + + protected override getIgnoreOutputPath(): string | undefined { + if (this.indexignore == null) return void 0 + return path.join('.trae', '.ignore') + } + + private getGlobalSteeringDir(): string { + return this.joinPath(this.getGlobalConfigDir(), STEERING_SUBDIR) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {projects} = ctx.collectedOutputContext.workspace + const {commands, skills, globalMemory, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const projectConfig = this.resolvePromptSourceProjectConfig(ctx) + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) + + if (globalMemory != null && activePromptScopes.has('global')) { + declarations.push({ + path: 
this.joinPath(this.getGlobalSteeringDir(), GLOBAL_MEMORY_FILE), + scope: 'global', + source: { + kind: 'globalMemory', + content: globalMemory.content as string + } satisfies TraeOutputSource + }) + } + + const scopedCommands = commands != null + ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, cmd => this.resolveCommandSourceScope(cmd), this.getTopicScopeOverride(ctx, 'commands')) + : {items: [] as readonly CommandPrompt[]} + const filteredCommands = filterByProjectConfig(scopedCommands.items, projectConfig, 'commands') + const scopedSkills = skills != null + ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) + : {items: [] as readonly SkillPrompt[]} + const filteredSkills = filterByProjectConfig(scopedSkills.items, projectConfig, 'skills') + const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) + + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + const projectBase = path.join(projectDir.basePath, projectDir.path) + + if (project.childMemoryPrompts != null && activePromptScopes.has('project')) { + for (const child of project.childMemoryPrompts) { + const childPath = child.workingChildDirectoryPath?.path ?? 
child.dir.path + const normalizedChildPath = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '') + const globPattern = this.buildProjectRelativeGlobPattern(normalizedChildPath) + const steeringContent = buildMarkdownWithFrontMatter( + {alwaysApply: false, globs: globPattern}, + [ + this.buildPathGuardHint(normalizedChildPath), + '', + child.content as string + ].join('\n') + ) + + declarations.push({ + path: path.join(projectBase, GLOBAL_CONFIG_DIR, RULES_SUBDIR, this.buildSteeringFileName(child)), + scope: 'project', + source: { + kind: 'steeringRule', + content: steeringContent + } satisfies TraeOutputSource + }) + } + } + + for (const cmd of filteredCommands) { + const fileName = this.transformCommandName(cmd, transformOptions) + declarations.push({ + path: path.join(projectBase, GLOBAL_CONFIG_DIR, COMMANDS_SUBDIR, fileName), + scope: 'project', + source: {kind: 'command', command: cmd} satisfies TraeOutputSource + }) + } + + for (const skill of filteredSkills) { + const skillName = skill.yamlFrontMatter.name + const skillDir = path.join(projectBase, GLOBAL_CONFIG_DIR, SKILLS_SUBDIR, skillName) + declarations.push({ + path: path.join(skillDir, SKILL_FILE_NAME), + scope: 'project', + source: {kind: 'skillMain', skill} satisfies TraeOutputSource + }) + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), + scope: 'project', + source: { + kind: 'skillChildDoc', + content: childDoc.content as string + } satisfies TraeOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope: 'project', + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies TraeOutputSource + }) + } + } + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = 
this.indexignore == null + ? void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: { + kind: 'ignoreFile', + content: ignoreFile.content + } satisfies TraeOutputSource + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as TraeOutputSource + switch (source.kind) { + case 'globalMemory': + case 'steeringRule': + case 'skillChildDoc': + case 'ignoreFile': return source.content + case 'command': return this.buildCommandContent(source.command) + case 'skillMain': { + const frontMatterData = this.buildSkillFrontMatter(source.skill) + return buildMarkdownWithFrontMatter(frontMatterData, source.skill.content as string) + } + case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + protected override buildSkillFrontMatter(skill: SkillPrompt): Record { + const fm: Record = { + description: skill.yamlFrontMatter.description ?? '' + } + + if (skill.yamlFrontMatter.displayName != null) fm['name'] = skill.yamlFrontMatter.displayName + + return fm + } + + private buildSteeringFileName(child: ProjectChildrenMemoryPrompt): string { + const childPath = child.workingChildDirectoryPath?.path ?? 
child.dir.path + const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '').replaceAll('/', '-') + return `trae-${normalized}.md` + } + + private buildPathGuardHint(normalizedChildPath: string): string { + if (normalizedChildPath.length === 0) { + return 'Scope guard: apply this rule to project source files only; do not apply to generated output directories (for example dist/, build/, out/, .next/, target/).' + } + + return [ + `Scope guard: this rule is for the project-root path "${normalizedChildPath}/" only.`, + `Do not apply this rule to generated output paths such as "dist/${normalizedChildPath}/", "build/${normalizedChildPath}/", "out/${normalizedChildPath}/", ".next/${normalizedChildPath}/", or "target/${normalizedChildPath}/".` + ].join('\n') + } + + private buildProjectRelativeGlobPattern(normalizedChildPath: string): string { + if (normalizedChildPath.length === 0) return '**/*' + return `${normalizedChildPath}/**` + } +} diff --git a/cli/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts b/cli/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts new file mode 100644 index 00000000..7d9a249b --- /dev/null +++ b/cli/src/plugins/VisualStudioCodeIDEConfigOutputPlugin.ts @@ -0,0 +1,64 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin, IDEKind} from './plugin-core' + +const VSCODE_DIR = '.vscode' + +export class VisualStudioCodeIDEConfigOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('VisualStudioCodeIDEConfigOutputPlugin', { + cleanup: { + delete: { + project: { + files: ['.vscode/settings.json', '.vscode/extensions.json'] + } + } + }, + capabilities: {} + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {projects} = ctx.collectedOutputContext.workspace + const {vscodeConfigFiles} = ctx.collectedOutputContext + const vscodeConfigs = vscodeConfigFiles ?? 
[] + + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + for (const config of vscodeConfigs) { + const targetRelativePath = this.getTargetRelativePath(config) + declarations.push({ + path: this.resolvePath(projectDir.basePath, projectDir.path, targetRelativePath), + scope: 'project', + source: {content: config.content} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } + + private getTargetRelativePath(config: {type: IDEKind, dir: {path: string}}): string { + const sourcePath = config.dir.path + + if (config.type !== IDEKind.VSCode) return this.basename(sourcePath) + + const vscodeIndex = sourcePath.indexOf(VSCODE_DIR) + if (vscodeIndex !== -1) return sourcePath.slice(Math.max(0, vscodeIndex)) + return this.joinPath(VSCODE_DIR, this.basename(sourcePath)) + } +} diff --git a/cli/src/plugins/WarpIDEOutputPlugin.ts b/cli/src/plugins/WarpIDEOutputPlugin.ts new file mode 100644 index 00000000..8e681b2e --- /dev/null +++ b/cli/src/plugins/WarpIDEOutputPlugin.ts @@ -0,0 +1,106 @@ +import type { + OutputFileDeclaration, + OutputWriteContext +} from './plugin-core' +import {AbstractOutputPlugin, PLUGIN_NAMES} from './plugin-core' + +const PROJECT_MEMORY_FILE = 'WARP.md' + +export class WarpIDEOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('WarpIDEOutputPlugin', { + outputFileName: PROJECT_MEMORY_FILE, + indexignore: '.warpindexignore', + cleanup: { + delete: { + project: { + files: [PROJECT_MEMORY_FILE] + } + } + }, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const 
declarations: OutputFileDeclaration[] = [] + const {workspace, globalMemory, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const {projects} = workspace + const agentsRegistered = this.shouldSkipDueToPlugin(ctx, PLUGIN_NAMES.AgentsOutput) + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) + + if (agentsRegistered) { + if (globalMemory != null && activePromptScopes.has('global')) { + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + declarations.push({ + path: this.resolveFullPath(projectDir), + scope: 'project', + source: {content: globalMemory.content as string} + }) + } + } + } else { + const globalMemoryContent = this.extractGlobalMemoryContent(ctx) + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + if (project.rootMemoryPrompt != null && activePromptScopes.has('project')) { + const combinedContent = this.combineGlobalWithContent( + globalMemoryContent, + project.rootMemoryPrompt.content as string + ) + declarations.push({ + path: this.resolveFullPath(projectDir), + scope: 'project', + source: {content: combinedContent} + }) + } + + if (project.childMemoryPrompts != null && activePromptScopes.has('project')) { + for (const child of project.childMemoryPrompts) { + declarations.push({ + path: this.resolveFullPath(child.dir), + scope: 'project', + source: {content: child.content as string} + }) + } + } + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? 
void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: this.resolvePath(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: {content: ignoreFile.content} + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as {content?: string} + if (source.content == null) throw new Error(`Unsupported declaration source for ${this.name}`) + return source.content + } +} diff --git a/cli/src/plugins/WindsurfOutputPlugin.test.ts b/cli/src/plugins/WindsurfOutputPlugin.test.ts new file mode 100644 index 00000000..d85d854f --- /dev/null +++ b/cli/src/plugins/WindsurfOutputPlugin.test.ts @@ -0,0 +1,151 @@ +import type {CommandPrompt, OutputScopeSelection, OutputWriteContext, Project, SkillPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {createLogger, FilePathKind, PromptKind} from './plugin-core' +import {WindsurfOutputPlugin} from './WindsurfOutputPlugin' + +function createCommandPrompt(scope: 'project' | 'workspace' | 'global', seriName: string): CommandPrompt { + return { + type: PromptKind.Command, + content: 'command content', + length: 15, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'dev/build.mdx', + basePath: path.resolve('tmp/dist/commands'), + getDirectoryName: () => 'dev', + getAbsolutePath: () => path.resolve('tmp/dist/commands/dev/build.mdx') + }, + commandPrefix: 'dev', + commandName: 'build', + seriName, + yamlFrontMatter: { + namingCase: 'kebabCase', + description: 'Build command', 
+ scope + }, + markdownContents: [] + } as CommandPrompt +} + +function createSkillPrompt(scope: 'project' | 'workspace' | 'global', seriName: string): SkillPrompt { + return { + type: PromptKind.Skill, + content: 'skill content', + length: 13, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'skills/ship-it', + basePath: path.resolve('tmp/dist/skills'), + getDirectoryName: () => 'ship-it', + getAbsolutePath: () => path.resolve('tmp/dist/skills/ship-it') + }, + seriName, + yamlFrontMatter: { + namingCase: 'kebabCase', + name: 'ship-it', + description: 'Ship release', + scope + }, + markdownContents: [] + } as SkillPrompt +} + +function createProject(workspaceBase: string, name: string, includeSeries: readonly string[], promptSource = false): Project { + return { + name, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: name, + basePath: workspaceBase, + getDirectoryName: () => name, + getAbsolutePath: () => path.join(workspaceBase, name) + }, + isPromptSourceProject: promptSource, + projectConfig: { + includeSeries: [...includeSeries] + } + } as Project +} + +function createWriteContext( + workspaceBase: string, + commands: readonly CommandPrompt[], + skills: readonly SkillPrompt[], + scopeOverrides: { + readonly commands: OutputScopeSelection + readonly skills: OutputScopeSelection + } +): OutputWriteContext { + return { + logger: createLogger('WindsurfOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + pluginOptions: { + outputScopes: { + plugins: { + WindsurfOutputPlugin: scopeOverrides + } + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + createProject(workspaceBase, 'alpha-project', ['alpha'], true), + createProject(workspaceBase, 'beta-project', ['beta']) + ] + }, + commands, + skills + } + } as OutputWriteContext +} + 
+describe('windsurfOutputPlugin scoped commands/skills output', () => { + it('writes workflows and skills to each project when scope is project', async () => { + const workspaceBase = path.resolve('tmp/windsurf-project-scope') + const plugin = new WindsurfOutputPlugin() + const context = createWriteContext( + workspaceBase, + [createCommandPrompt('project', 'alpha')], + [createSkillPrompt('project', 'alpha')], + {commands: 'project', skills: 'project'} + ) + + const declarations = await plugin.declareOutputFiles(context) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(workspaceBase, 'alpha-project', '.windsurf', 'workflows', 'dev-build.md')) + expect(paths).toContain(path.join(workspaceBase, 'alpha-project', '.windsurf', 'skills', 'ship-it', 'SKILL.md')) + expect(paths.some(outputPath => outputPath.includes(path.join('beta-project', '.windsurf')))).toBe(false) + expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) + }) + + it('writes workflows and skills to workspace-local .codeium when scope is workspace', async () => { + const workspaceBase = path.resolve('tmp/windsurf-workspace-scope') + const plugin = new WindsurfOutputPlugin() + const context = createWriteContext( + workspaceBase, + [createCommandPrompt('workspace', 'alpha')], + [createSkillPrompt('workspace', 'alpha')], + {commands: 'workspace', skills: 'workspace'} + ) + + const declarations = await plugin.declareOutputFiles(context) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toContain(path.join(workspaceBase, '.codeium', 'windsurf', 'global_workflows', 'dev-build.md')) + expect(paths).toContain(path.join(workspaceBase, '.codeium', 'windsurf', 'skills', 'ship-it', 'SKILL.md')) + expect(declarations.every(declaration => declaration.scope === 'workspace')).toBe(true) + }) +}) diff --git a/cli/src/plugins/WindsurfOutputPlugin.ts b/cli/src/plugins/WindsurfOutputPlugin.ts new file mode 
100644 index 00000000..9db300ed --- /dev/null +++ b/cli/src/plugins/WindsurfOutputPlugin.ts @@ -0,0 +1,288 @@ +import type {CommandPrompt, OutputFileDeclaration, OutputWriteContext, RuleContentOptions, RulePrompt, SkillPrompt} from './plugin-core' +import {Buffer} from 'node:buffer' +import * as path from 'node:path' +import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' +import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterByProjectConfig, PLUGIN_NAMES} from './plugin-core' + +const CODEIUM_WINDSURF_DIR = '.codeium/windsurf' +const WORKFLOWS_SUBDIR = 'global_workflows' +const PROJECT_WORKFLOWS_SUBDIR = 'workflows' +const MEMORIES_SUBDIR = 'memories' +const GLOBAL_MEMORY_FILE = 'global_rules.md' +const SKILLS_SUBDIR = 'skills' +const SKILL_FILE_NAME = 'SKILL.md' +const WINDSURF_RULES_DIR = '.windsurf' +const WINDSURF_RULES_SUBDIR = 'rules' +const RULE_FILE_PREFIX = 'rule-' + +type WindsurfOutputSource + = | {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'skillMain', readonly skill: SkillPrompt} + | {readonly kind: 'skillChildDoc', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'rule', readonly rule: RulePrompt} + | {readonly kind: 'ignoreFile', readonly content: string} + +export class WindsurfOutputPlugin extends AbstractOutputPlugin { + constructor() { + super('WindsurfOutputPlugin', { + globalConfigDir: CODEIUM_WINDSURF_DIR, + outputFileName: '', + dependsOn: [PLUGIN_NAMES.AgentsOutput], + indexignore: '.codeiumignore', + commands: { + subDir: WORKFLOWS_SUBDIR, + transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? 
{} + }, + skills: { + subDir: SKILLS_SUBDIR + }, + rules: { + sourceScopes: ['project', 'global'] + }, + cleanup: { + delete: { + project: { + dirs: ['.windsurf/rules', '.windsurf/workflows', '.windsurf/global_workflows', '.windsurf/skills', '.codeium/windsurf/global_workflows', '.codeium/windsurf/skills'] + }, + workspace: { + dirs: ['.codeium/windsurf/global_workflows', '.codeium/windsurf/skills'] + }, + global: { + dirs: ['.codeium/windsurf/global_workflows', '.codeium/windsurf/memories', '.codeium/windsurf/skills'] + } + } + }, + capabilities: { + prompt: { + scopes: ['global'], + singleScope: false + }, + rules: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'workspace', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const {workspace, commands, skills, globalMemory, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) + const globalBase = this.getCodeiumWindsurfDir() + const workspaceBase = this.resolveDirectoryPath(workspace.directory) + const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['global'])) + const resolveScopedCodeiumWindsurfBasePath = ( + scope: 'project' | 'workspace' | 'global', + projectDir?: {readonly basePath: string, readonly path: string} + ): string | undefined => { + if (scope === 'global') return globalBase + if (scope === 'workspace') return path.join(workspaceBase, CODEIUM_WINDSURF_DIR) + if (projectDir == null) return void 0 + return path.join(projectDir.basePath, projectDir.path, WINDSURF_RULES_DIR) + } + + if (globalMemory != null && activePromptScopes.has('global')) { + declarations.push({ + path: path.join(globalBase, MEMORIES_SUBDIR, GLOBAL_MEMORY_FILE), + scope: 'global', + 
source: { + kind: 'globalMemory', + content: globalMemory.content as string + } satisfies WindsurfOutputSource + }) + } + + if (skills != null && skills.length > 0) { + const scopedSkills = this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) + const pushSkillDeclarations = (basePath: string, scope: 'project' | 'workspace' | 'global', skill: SkillPrompt): void => { + const skillName = skill.yamlFrontMatter.name + const skillDir = path.join(basePath, SKILLS_SUBDIR, skillName) + declarations.push({ + path: path.join(skillDir, SKILL_FILE_NAME), + scope, + source: {kind: 'skillMain', skill} satisfies WindsurfOutputSource + }) + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), + scope, + source: { + kind: 'skillChildDoc', + content: childDoc.content as string + } satisfies WindsurfOutputSource + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope, + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } satisfies WindsurfOutputSource + }) + } + } + } + + if (scopedSkills.selectedScope === 'project') { + for (const project of workspace.projects) { + const projectDir = project.dirFromWorkspacePath + const projectBase = resolveScopedCodeiumWindsurfBasePath('project', projectDir) + if (projectBase == null) continue + const filteredSkills = filterByProjectConfig(scopedSkills.items, project.projectConfig, 'skills') + for (const skill of filteredSkills) pushSkillDeclarations(projectBase, 'project', skill) + } + } else if (scopedSkills.selectedScope != null) { + const basePath = resolveScopedCodeiumWindsurfBasePath(scopedSkills.selectedScope) + if (basePath != null) { + const filteredSkills 
= filterByProjectConfig(scopedSkills.items, promptSourceProjectConfig, 'skills') + for (const skill of filteredSkills) pushSkillDeclarations(basePath, scopedSkills.selectedScope, skill) + } + } + } + + if (commands != null && commands.length > 0) { + const scopedCommands = this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, cmd => this.resolveCommandSourceScope(cmd), this.getTopicScopeOverride(ctx, 'commands')) + const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) + if (scopedCommands.selectedScope === 'project') { + for (const project of workspace.projects) { + const projectDir = project.dirFromWorkspacePath + const projectBase = resolveScopedCodeiumWindsurfBasePath('project', projectDir) + if (projectBase == null) continue + const filteredCommands = filterByProjectConfig(scopedCommands.items, project.projectConfig, 'commands') + for (const cmd of filteredCommands) { + declarations.push({ + path: path.join(projectBase, PROJECT_WORKFLOWS_SUBDIR, this.transformCommandName(cmd, transformOptions)), + scope: 'project', + source: {kind: 'command', command: cmd} satisfies WindsurfOutputSource + }) + } + } + } else if (scopedCommands.selectedScope != null) { + const basePath = resolveScopedCodeiumWindsurfBasePath(scopedCommands.selectedScope) + if (basePath != null) { + const filteredCommands = filterByProjectConfig(scopedCommands.items, promptSourceProjectConfig, 'commands') + for (const cmd of filteredCommands) { + declarations.push({ + path: path.join(basePath, WORKFLOWS_SUBDIR, this.transformCommandName(cmd, transformOptions)), + scope: scopedCommands.selectedScope, + source: {kind: 'command', command: cmd} satisfies WindsurfOutputSource + }) + } + } + } + } + + if (rules != null && rules.length > 0) { + const activeRuleScopes = new Set(this.selectRuleScopes(ctx, rules)) + const globalRules = rules.filter(r => this.normalizeSourceScope(this.normalizeRuleScope(r)) === 'global') + if 
(activeRuleScopes.has('global')) { + for (const rule of globalRules) { + declarations.push({ + path: path.join(globalBase, MEMORIES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'global', + source: {kind: 'rule', rule} satisfies WindsurfOutputSource + }) + } + } + + if (activeRuleScopes.has('project')) { + for (const project of workspace.projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + const projectRules = applySubSeriesGlobPrefix( + filterByProjectConfig(rules.filter(r => this.normalizeSourceScope(this.normalizeRuleScope(r)) === 'project'), project.projectConfig, 'rules'), + project.projectConfig + ) + for (const rule of projectRules) { + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, WINDSURF_RULES_DIR, WINDSURF_RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'project', + source: {kind: 'rule', rule} satisfies WindsurfOutputSource + }) + } + } + } + } + + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? 
void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + if (ignoreOutputPath != null && ignoreFile != null) { + for (const project of workspace.projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null || project.isPromptSourceProject === true) continue + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: { + kind: 'ignoreFile', + content: ignoreFile.content + } satisfies WindsurfOutputSource + }) + } + } + + return declarations + } + + override async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as WindsurfOutputSource + switch (source.kind) { + case 'globalMemory': + case 'skillChildDoc': + case 'ignoreFile': return source.content + case 'command': return this.buildCommandContent(source.command) + case 'skillMain': { + const frontMatterData = this.buildSkillFrontMatter(source.skill) + return buildMarkdownWithFrontMatter(frontMatterData, source.skill.content as string) + } + case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + case 'rule': return this.buildRuleContent(source.rule) + default: throw new Error(`Unsupported declaration source for ${this.name}`) + } + } + + private getCodeiumWindsurfDir(): string { return path.join(this.getHomeDir(), CODEIUM_WINDSURF_DIR) } + + protected override buildRuleFileName(rule: RulePrompt, prefix: string = RULE_FILE_PREFIX): string { + return `${prefix}${rule.prefix}-${rule.ruleName}.md` + } + + protected override buildRuleContent(rule: RulePrompt, _options?: RuleContentOptions): string { + const fmData: Record = {trigger: 'glob', globs: rule.globs.length > 0 ? 
rule.globs.join(', ') : ''} + const raw = buildMarkdownWithFrontMatter(fmData, rule.content) + const lines = raw.split('\n') + return lines.map(line => { + const match = /^(\s*globs:\s*)(['"])(.*)\2\s*$/.exec(line) + if (match == null) return line + const prefix = match[1] ?? 'globs: ' + const value = match[3] ?? '' + if (value.trim().length === 0) return line + return `${prefix}${value}` + }).join('\n') + } +} diff --git a/cli/src/plugins/desk-paths/index.ts b/cli/src/plugins/desk-paths.ts similarity index 85% rename from cli/src/plugins/desk-paths/index.ts rename to cli/src/plugins/desk-paths.ts index 39ca1f08..1d72ea66 100644 --- a/cli/src/plugins/desk-paths/index.ts +++ b/cli/src/plugins/desk-paths.ts @@ -280,67 +280,6 @@ export function deleteDirectories(dirs: readonly string[]): DeletionResult { } return {deleted, errors} -} // RelativePath Factory - Construct RelativePath objects - -/** - * Directory path kind discriminator. - */ -export enum FilePathKind { - Relative = 'Relative', - Absolute = 'Absolute', - Root = 'Root' -} - -/** - * A path relative to a base directory. - */ -export interface RelativePath { - readonly pathKind: FilePathKind.Relative - readonly path: string - readonly basePath: string - readonly getDirectoryName: () => string - readonly getAbsolutePath: () => string -} - -/** - * Create a RelativePath from a path string, base path, and directory name function. - * - * @param pathStr - The relative path string - * @param basePath - The base directory for absolute path resolution - * @param dirNameFn - Function returning the directory name - * @returns A RelativePath object - */ -export function createRelativePath( - pathStr: string, - basePath: string, - dirNameFn: () => string -): RelativePath { - return { - pathKind: FilePathKind.Relative, - path: pathStr, - basePath, - getDirectoryName: dirNameFn, - getAbsolutePath: () => path.join(basePath, pathStr) - } -} - -/** - * Create a RelativePath for a file within a parent directory. 
- * The getDirectoryName delegates to the parent directory's getDirectoryName. - * - * @param dir - Parent directory RelativePath - * @param fileName - Name of the file - * @returns A RelativePath pointing to the file - */ -export function createFileRelativePath(dir: RelativePath, fileName: string): RelativePath { - const filePath = path.join(dir.path, fileName) - return { - pathKind: FilePathKind.Relative, - path: filePath, - basePath: dir.basePath, - getDirectoryName: () => dir.getDirectoryName(), - getAbsolutePath: () => path.join(dir.basePath, filePath) - } } // Safe Write - Dry-run aware file writing with error handling /** @@ -358,7 +297,8 @@ export interface SafeWriteOptions { readonly fullPath: string readonly content: string | Buffer readonly type: string - readonly relativePath: RelativePath + /** 相对路径字符串 (相对于输出目标目录) */ + readonly relativePath: string readonly dryRun: boolean readonly logger: WriteLogger } @@ -367,7 +307,8 @@ export interface SafeWriteOptions { * Result of a safe write operation. 
*/ export interface SafeWriteResult { - readonly path: RelativePath + /** 相对路径字符串 (相对于输出目标目录) */ + readonly path: string readonly success: boolean readonly skipped?: boolean readonly error?: Error diff --git a/cli/src/plugins/ide-config-output.test.ts b/cli/src/plugins/ide-config-output.test.ts new file mode 100644 index 00000000..a7183916 --- /dev/null +++ b/cli/src/plugins/ide-config-output.test.ts @@ -0,0 +1,117 @@ +import type {OutputWriteContext, Project, ProjectIDEConfigFile} from './plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {EditorConfigOutputPlugin} from './EditorConfigOutputPlugin' +import {JetBrainsIDECodeStyleConfigOutputPlugin} from './JetBrainsIDECodeStyleConfigOutputPlugin' +import {createLogger, FilePathKind, IDEKind} from './plugin-core' +import {VisualStudioCodeIDEConfigOutputPlugin} from './VisualStudioCodeIDEConfigOutputPlugin' + +function createProject(workspaceBase: string, name: string, promptSource = false): Project { + return { + name, + isPromptSourceProject: promptSource, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: name, + basePath: workspaceBase, + getDirectoryName: () => name, + getAbsolutePath: () => path.join(workspaceBase, name) + } + } as Project +} + +function createConfigFile(type: IDEKind, sourcePath: string, content: string): ProjectIDEConfigFile { + return { + type, + content, + length: content.length, + filePathKind: FilePathKind.Absolute, + dir: { + pathKind: FilePathKind.Absolute, + path: sourcePath, + getDirectoryName: () => path.basename(sourcePath) + } + } as ProjectIDEConfigFile +} + +function createWriteContext(workspaceBase: string): OutputWriteContext { + return { + logger: createLogger('IdeConfigOutputPluginTest', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + 
getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + createProject(workspaceBase, 'aindex', true), + createProject(workspaceBase, 'memory-sync') + ] + }, + editorConfigFiles: [ + createConfigFile(IDEKind.EditorConfig, path.join(workspaceBase, 'aindex', 'public', '.editorconfig'), 'root = true\n') + ], + vscodeConfigFiles: [ + createConfigFile(IDEKind.VSCode, path.join(workspaceBase, 'aindex', 'public', '.vscode', 'settings.json'), '{}\n'), + createConfigFile(IDEKind.VSCode, path.join(workspaceBase, 'aindex', 'public', '.vscode', 'extensions.json'), '{}\n') + ], + jetbrainsConfigFiles: [ + createConfigFile(IDEKind.IntellijIDEA, path.join(workspaceBase, 'aindex', 'public', '.idea', '.gitignore'), '/workspace.xml\n'), + createConfigFile(IDEKind.IntellijIDEA, path.join(workspaceBase, 'aindex', 'public', '.idea', 'codeStyles', 'Project.xml'), '\n'), + createConfigFile(IDEKind.IntellijIDEA, path.join(workspaceBase, 'aindex', 'public', '.idea', 'codeStyles', 'codeStyleConfig.xml'), '\n') + ] + } + } as OutputWriteContext +} + +describe('ide config output plugins', () => { + it('includes the prompt source project for editorconfig output', async () => { + const workspaceBase = path.resolve('tmp/ide-output-editorconfig') + const plugin = new EditorConfigOutputPlugin() + const declarations = await plugin.declareOutputFiles(createWriteContext(workspaceBase)) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toEqual([ + path.join(workspaceBase, 'aindex', '.editorconfig'), + path.join(workspaceBase, 'memory-sync', '.editorconfig') + ]) + }) + + it('includes the prompt source project for vscode output', async () => { + const workspaceBase = path.resolve('tmp/ide-output-vscode') + const plugin = new VisualStudioCodeIDEConfigOutputPlugin() + const declarations = await plugin.declareOutputFiles(createWriteContext(workspaceBase)) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toEqual([ + 
path.join(workspaceBase, 'aindex', '.vscode', 'settings.json'), + path.join(workspaceBase, 'aindex', '.vscode', 'extensions.json'), + path.join(workspaceBase, 'memory-sync', '.vscode', 'settings.json'), + path.join(workspaceBase, 'memory-sync', '.vscode', 'extensions.json') + ]) + }) + + it('includes the prompt source project for jetbrains output', async () => { + const workspaceBase = path.resolve('tmp/ide-output-jetbrains') + const plugin = new JetBrainsIDECodeStyleConfigOutputPlugin() + const declarations = await plugin.declareOutputFiles(createWriteContext(workspaceBase)) + const paths = declarations.map(declaration => declaration.path) + + expect(paths).toEqual([ + path.join(workspaceBase, 'aindex', '.idea', '.gitignore'), + path.join(workspaceBase, 'aindex', '.idea', 'codeStyles', 'Project.xml'), + path.join(workspaceBase, 'aindex', '.idea', 'codeStyles', 'codeStyleConfig.xml'), + path.join(workspaceBase, 'aindex', '.editorconfig'), + path.join(workspaceBase, 'memory-sync', '.idea', '.gitignore'), + path.join(workspaceBase, 'memory-sync', '.idea', 'codeStyles', 'Project.xml'), + path.join(workspaceBase, 'memory-sync', '.idea', 'codeStyles', 'codeStyleConfig.xml'), + path.join(workspaceBase, 'memory-sync', '.editorconfig') + ]) + }) +}) diff --git a/cli/src/plugins/plugin-agentskills-compact/index.ts b/cli/src/plugins/plugin-agentskills-compact.ts similarity index 100% rename from cli/src/plugins/plugin-agentskills-compact/index.ts rename to cli/src/plugins/plugin-agentskills-compact.ts diff --git a/cli/src/plugins/plugin-agentskills-compact/GenericSkillsOutputPlugin.ts b/cli/src/plugins/plugin-agentskills-compact/GenericSkillsOutputPlugin.ts deleted file mode 100644 index db1b1d1c..00000000 --- a/cli/src/plugins/plugin-agentskills-compact/GenericSkillsOutputPlugin.ts +++ /dev/null @@ -1,393 +0,0 @@ -import type { - OutputPluginContext, - OutputWriteContext, - SkillPrompt, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} 
from '../plugin-shared/types' - -import {Buffer} from 'node:buffer' -import * as fs from 'node:fs' -import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' -import {AbstractOutputPlugin} from '@truenine/plugin-output-shared' -import {FilePathKind} from '../plugin-shared' - -const PROJECT_SKILLS_DIR = '.agents/skills' -const LEGACY_SKILLS_DIR = '.skills' // 旧路径,用于清理 -const SKILL_FILE_NAME = 'SKILL.md' -const MCP_CONFIG_FILE = 'mcp.json' - -/** - * Output plugin that writes skills directly to each project's .agents/skills/ directory. - * - * Structure: - * - Project: /.agents/skills//SKILL.md, mcp.json, child docs, resources - * - * Also cleans up legacy .skills/ directories from previous versions. - */ -export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('GenericSkillsOutputPlugin', {outputFileName: SKILL_FILE_NAME}) - - this.registerCleanEffect('legacy-global-skills-cleanup', async ctx => { // 向后兼容:clean 时清理旧的 ~/.skills 目录 - const legacyGlobalSkillsDir = this.joinPath(this.getHomeDir(), LEGACY_SKILLS_DIR) - if (!this.existsSync(legacyGlobalSkillsDir)) return {success: true, description: 'Legacy global skills dir does not exist, nothing to clean'} - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'legacyCleanup', path: legacyGlobalSkillsDir}) - return {success: true, description: `Would clean legacy global skills dir: ${legacyGlobalSkillsDir}`} - } - try { - const entries = this.readdirSync(legacyGlobalSkillsDir, {withFileTypes: true}) // 只删除 skill 子目录(避免误删用户其他文件) - let cleanedCount = 0 - for (const entry of entries) { - if (entry.isDirectory()) { - const skillDir = this.joinPath(legacyGlobalSkillsDir, entry.name) - const skillFile = this.joinPath(skillDir, SKILL_FILE_NAME) - if (this.existsSync(skillFile)) { // 确认是 skill 目录(包含 SKILL.md)才删除 - fs.rmSync(skillDir, {recursive: true}) - cleanedCount++ - } - } - } - const remainingEntries = this.readdirSync(legacyGlobalSkillsDir) // 
如果目录为空则删除目录本身 - if (remainingEntries.length === 0) fs.rmdirSync(legacyGlobalSkillsDir) - this.log.trace({action: 'clean', type: 'legacySkills', dir: legacyGlobalSkillsDir, cleanedCount}) - return {success: true, description: `Cleaned ${cleanedCount} legacy skills from ${legacyGlobalSkillsDir}`} - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'clean', type: 'legacySkills', dir: legacyGlobalSkillsDir, error: errMsg}) - return {success: false, description: `Failed to clean legacy skills dir`, error: error as Error} - } - }) - } - - async registerProjectOutputDirs(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - const {skills} = ctx.collectedInputContext - - if (skills == null || skills.length === 0) return results - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - - const skillsDir = this.joinPath(project.dirFromWorkspacePath.path, PROJECT_SKILLS_DIR) // 注册新的 .agents/skills/ 目录 - results.push({ - pathKind: FilePathKind.Relative, - path: skillsDir, - basePath: project.dirFromWorkspacePath.basePath, - getDirectoryName: () => PROJECT_SKILLS_DIR, - getAbsolutePath: () => this.joinPath(project.dirFromWorkspacePath!.basePath, skillsDir) - }) - - const legacySkillsDir = this.joinPath(project.dirFromWorkspacePath.path, LEGACY_SKILLS_DIR) // 注册旧的 .skills/ 目录用于清理 - results.push({ - pathKind: FilePathKind.Relative, - path: legacySkillsDir, - basePath: project.dirFromWorkspacePath.basePath, - getDirectoryName: () => LEGACY_SKILLS_DIR, - getAbsolutePath: () => this.joinPath(project.dirFromWorkspacePath!.basePath, legacySkillsDir) - }) - } - - return results - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - const {skills} = ctx.collectedInputContext - - if (skills 
== null || skills.length === 0) return results - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - - const projectSkillsDir = this.joinPath( - project.dirFromWorkspacePath.basePath, - project.dirFromWorkspacePath.path, - PROJECT_SKILLS_DIR - ) - - for (const skill of skills) { - const skillName = skill.yamlFrontMatter.name - const skillDir = this.joinPath(projectSkillsDir, skillName) - - results.push({ // 注册 SKILL.md - pathKind: FilePathKind.Relative, - path: this.joinPath(PROJECT_SKILLS_DIR, skillName, SKILL_FILE_NAME), - basePath: this.joinPath(project.dirFromWorkspacePath.basePath, project.dirFromWorkspacePath.path), - getDirectoryName: () => skillName, - getAbsolutePath: () => this.joinPath(skillDir, SKILL_FILE_NAME) - }) - - if (skill.mcpConfig != null) { // 注册 mcp.json(如果有) - results.push({ - pathKind: FilePathKind.Relative, - path: this.joinPath(PROJECT_SKILLS_DIR, skillName, MCP_CONFIG_FILE), - basePath: this.joinPath(project.dirFromWorkspacePath.basePath, project.dirFromWorkspacePath.path), - getDirectoryName: () => skillName, - getAbsolutePath: () => this.joinPath(skillDir, MCP_CONFIG_FILE) - }) - } - - if (skill.childDocs != null) { // 注册 child docs - for (const childDoc of skill.childDocs) { - const outputRelativePath = childDoc.relativePath.replace(/\.mdx$/, '.md') - results.push({ - pathKind: FilePathKind.Relative, - path: this.joinPath(PROJECT_SKILLS_DIR, skillName, outputRelativePath), - basePath: this.joinPath(project.dirFromWorkspacePath.basePath, project.dirFromWorkspacePath.path), - getDirectoryName: () => skillName, - getAbsolutePath: () => this.joinPath(skillDir, outputRelativePath) - }) - } - } - - if (skill.resources != null) { // 注册 resources - for (const resource of skill.resources) { - results.push({ - pathKind: FilePathKind.Relative, - path: this.joinPath(PROJECT_SKILLS_DIR, skillName, resource.relativePath), - basePath: this.joinPath(project.dirFromWorkspacePath.basePath, 
project.dirFromWorkspacePath.path), - getDirectoryName: () => skillName, - getAbsolutePath: () => this.joinPath(skillDir, resource.relativePath) - }) - } - } - } - } - - return results - } - - async registerGlobalOutputDirs(): Promise { - return [] // 不再使用全局输出目录 - } - - async registerGlobalOutputFiles(): Promise { - return [] // 不再使用全局输出文件 - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {skills} = ctx.collectedInputContext - const {projects} = ctx.collectedInputContext.workspace - - if (skills == null || skills.length === 0) { - this.log.trace({action: 'skip', reason: 'noSkills'}) - return false - } - - if (projects.length !== 0) return true - - this.log.trace({action: 'skip', reason: 'noProjects'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - const {skills} = ctx.collectedInputContext - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - if (skills == null || skills.length === 0) return {files: fileResults, dirs: dirResults} - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - - const projectSkillsDir = this.joinPath( - project.dirFromWorkspacePath.basePath, - project.dirFromWorkspacePath.path, - PROJECT_SKILLS_DIR - ) - - for (const skill of skills) { - const skillResults = await this.writeSkill(ctx, skill, projectSkillsDir) // 将技能文件直接写入项目目录 - fileResults.push(...skillResults) - } - } - - return {files: fileResults, dirs: dirResults} - } - - async writeGlobalOutputs(): Promise { - return {files: [], dirs: []} // 不再写入全局输出,所有技能文件直接写入项目目录 - } - - private async writeSkill( - ctx: OutputWriteContext, - skill: SkillPrompt, - skillsDir: string - ): Promise { - const results: WriteResult[] = [] - const skillName = skill.yamlFrontMatter.name - const skillDir = this.joinPath(skillsDir, skillName) - const skillFilePath = this.joinPath(skillDir, SKILL_FILE_NAME) - - const skillRelativePath: 
RelativePath = { // Create RelativePath for SKILL.md - pathKind: FilePathKind.Relative, - path: SKILL_FILE_NAME, - basePath: skillDir, - getDirectoryName: () => skillName, - getAbsolutePath: () => skillFilePath - } - - const frontMatterData = this.buildSkillFrontMatter(skill) // Build SKILL.md content with front matter - const bodyContent = skill.content as string - const skillContent = buildMarkdownWithFrontMatter(frontMatterData, bodyContent) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'skill', path: skillFilePath}) - results.push({path: skillRelativePath, success: true, skipped: false}) - } else { - try { - this.ensureDirectory(skillDir) - this.writeFileSync(skillFilePath, skillContent) - this.log.trace({action: 'write', type: 'skill', path: skillFilePath}) - results.push({path: skillRelativePath, success: true}) - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'skill', path: skillFilePath, error: errMsg}) - results.push({path: skillRelativePath, success: false, error: error as Error}) - } - } - - if (skill.mcpConfig != null) { // Write mcp.json if skill has MCP configuration - const mcpResult = await this.writeMcpConfig(ctx, skill, skillDir) - results.push(mcpResult) - } - - if (skill.childDocs != null) { // Write child docs - for (const childDoc of skill.childDocs) { - const childDocResult = await this.writeChildDoc(ctx, childDoc, skillDir, skillName) - results.push(childDocResult) - } - } - - if (skill.resources != null) { // Write resources - for (const resource of skill.resources) { - const resourceResult = await this.writeResource(ctx, resource, skillDir, skillName) - results.push(resourceResult) - } - } - - return results - } - - private async writeMcpConfig( - ctx: OutputWriteContext, - skill: SkillPrompt, - skillDir: string - ): Promise { - const skillName = skill.yamlFrontMatter.name - const mcpConfigPath = this.joinPath(skillDir, 
MCP_CONFIG_FILE) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: MCP_CONFIG_FILE, - basePath: skillDir, - getDirectoryName: () => skillName, - getAbsolutePath: () => mcpConfigPath - } - - const mcpConfigContent = skill.mcpConfig!.rawContent - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'mcpConfig', path: mcpConfigPath}) - return {path: relativePath, success: true, skipped: false} - } - - try { - this.ensureDirectory(skillDir) - this.writeFileSync(mcpConfigPath, mcpConfigContent) - this.log.trace({action: 'write', type: 'mcpConfig', path: mcpConfigPath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'mcpConfig', path: mcpConfigPath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - private async writeChildDoc( - ctx: OutputWriteContext, - childDoc: {relativePath: string, content: unknown}, - skillDir: string, - skillName: string - ): Promise { - const outputRelativePath = childDoc.relativePath.replace(/\.mdx$/, '.md') // Convert .mdx to .md for output - const childDocPath = this.joinPath(skillDir, outputRelativePath) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: outputRelativePath, - basePath: skillDir, - getDirectoryName: () => skillName, - getAbsolutePath: () => childDocPath - } - - const content = childDoc.content as string - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'childDoc', path: childDocPath}) - return {path: relativePath, success: true, skipped: false} - } - - try { - const parentDir = this.dirname(childDocPath) - this.ensureDirectory(parentDir) - this.writeFileSync(childDocPath, content) - this.log.trace({action: 'write', type: 'childDoc', path: childDocPath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error 
instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'childDoc', path: childDocPath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - private async writeResource( - ctx: OutputWriteContext, - resource: {relativePath: string, content: string, encoding: 'text' | 'base64'}, - skillDir: string, - skillName: string - ): Promise { - const resourcePath = this.joinPath(skillDir, resource.relativePath) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: resource.relativePath, - basePath: skillDir, - getDirectoryName: () => skillName, - getAbsolutePath: () => resourcePath - } - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'resource', path: resourcePath}) - return {path: relativePath, success: true, skipped: false} - } - - try { - const parentDir = this.dirname(resourcePath) - this.ensureDirectory(parentDir) - - if (resource.encoding === 'base64') { // Handle binary vs text encoding - const buffer = Buffer.from(resource.content, 'base64') - this.writeFileSyncBuffer(resourcePath, buffer) - } else this.writeFileSync(resourcePath, resource.content) - - this.log.trace({action: 'write', type: 'resource', path: resourcePath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'resource', path: resourcePath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } -} diff --git a/cli/src/plugins/plugin-agentsmd/index.ts b/cli/src/plugins/plugin-agentsmd.ts similarity index 100% rename from cli/src/plugins/plugin-agentsmd/index.ts rename to cli/src/plugins/plugin-agentsmd.ts diff --git a/cli/src/plugins/plugin-agentsmd/AgentsOutputPlugin.ts b/cli/src/plugins/plugin-agentsmd/AgentsOutputPlugin.ts deleted file mode 100644 index 5dc0d059..00000000 --- a/cli/src/plugins/plugin-agentsmd/AgentsOutputPlugin.ts +++ /dev/null @@ -1,74 +0,0 @@ -import type { - OutputPluginContext, - OutputWriteContext, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import {AbstractOutputPlugin} from '@truenine/plugin-output-shared' - -const PROJECT_MEMORY_FILE = 'AGENTS.md' - -export class AgentsOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('AgentsOutputPlugin', {outputFileName: PROJECT_MEMORY_FILE}) - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - - for (const project of projects) { - if (project.rootMemoryPrompt != null && project.dirFromWorkspacePath != null) { // Root memory prompt uses project.dirFromWorkspacePath - results.push(this.createFileRelativePath(project.dirFromWorkspacePath, PROJECT_MEMORY_FILE)) - } - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - if (child.dir != null && this.isRelativePath(child.dir)) results.push(this.createFileRelativePath(child.dir, PROJECT_MEMORY_FILE)) - } - } - } - - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {workspace} = ctx.collectedInputContext - const hasProjectOutputs = workspace.projects.some( - p => 
p.rootMemoryPrompt != null || (p.childMemoryPrompts?.length ?? 0) > 0 - ) - - if (hasProjectOutputs) return true - - this.log.trace({action: 'skip', reason: 'noOutputs'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - for (const project of projects) { - const projectName = project.name ?? 'unknown' - const projectDir = project.dirFromWorkspacePath - - if (projectDir == null) continue - - if (project.rootMemoryPrompt != null) { // Write root memory prompt (only if exists) - const result = await this.writePromptFile(ctx, projectDir, project.rootMemoryPrompt.content as string, `project:${projectName}/root`) - fileResults.push(result) - } - - if (project.childMemoryPrompts != null) { // Write children memory prompts - for (const child of project.childMemoryPrompts) { - const childResult = await this.writePromptFile(ctx, child.dir, child.content as string, `project:${projectName}/child:${child.workingChildDirectoryPath?.path ?? 
'unknown'}`) - fileResults.push(childResult) - } - } - } - - return {files: fileResults, dirs: dirResults} - } -} diff --git a/cli/src/plugins/plugin-claude-code-cli/index.ts b/cli/src/plugins/plugin-claude-code-cli.ts similarity index 100% rename from cli/src/plugins/plugin-claude-code-cli/index.ts rename to cli/src/plugins/plugin-claude-code-cli.ts diff --git a/cli/src/plugins/plugin-claude-code-cli/ClaudeCodeCLIOutputPlugin.ts b/cli/src/plugins/plugin-claude-code-cli/ClaudeCodeCLIOutputPlugin.ts deleted file mode 100644 index 6c9f139d..00000000 --- a/cli/src/plugins/plugin-claude-code-cli/ClaudeCodeCLIOutputPlugin.ts +++ /dev/null @@ -1,117 +0,0 @@ -import type {OutputPluginContext, OutputWriteContext, RulePrompt, WriteResults} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import * as path from 'node:path' -import {buildMarkdownWithFrontMatter, doubleQuoted} from '@truenine/md-compiler/markdown' -import {applySubSeriesGlobPrefix, BaseCLIOutputPlugin, filterRulesByProjectConfig} from '@truenine/plugin-output-shared' - -const PROJECT_MEMORY_FILE = 'CLAUDE.md' -const GLOBAL_CONFIG_DIR = '.claude' -const RULES_SUBDIR = 'rules' -const COMMANDS_SUBDIR = 'commands' -const AGENTS_SUBDIR = 'agents' -const SKILLS_SUBDIR = 'skills' -const RULE_FILE_PREFIX = 'rule-' - -/** - * Output plugin for Claude Code CLI. - * - * Outputs rules to `.claude/rules/` directory with frontmatter format. - * - * @see https://github.com/anthropics/claude-code/issues/26868 - * Known bug: Claude Code CLI has issues with `.claude/rules` directory handling. - * This may affect rule loading behavior in certain scenarios. 
- */ -export class ClaudeCodeCLIOutputPlugin extends BaseCLIOutputPlugin { - constructor() { - super('ClaudeCodeCLIOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: PROJECT_MEMORY_FILE, - toolPreset: 'claudeCode', - supportsCommands: true, - supportsSubAgents: true, - supportsSkills: true, - commandsSubDir: COMMANDS_SUBDIR, - agentsSubDir: AGENTS_SUBDIR, - skillsSubDir: SKILLS_SUBDIR - }) - } - - protected override buildRuleFileName(rule: RulePrompt, prefix: string = RULE_FILE_PREFIX): string { - return `${prefix}${rule.series}-${rule.ruleName}.md` - } - - protected override buildRuleContent(rule: RulePrompt): string { - if (rule.globs.length === 0) return rule.content - return buildMarkdownWithFrontMatter({paths: rule.globs.map(doubleQuoted)}, rule.content) - } - - override async registerGlobalOutputDirs(_ctx: OutputPluginContext): Promise { - return [] - } - - override async registerGlobalOutputFiles(ctx: OutputPluginContext): Promise { - return super.registerGlobalOutputFiles(ctx) - } - - override async registerProjectOutputDirs(ctx: OutputPluginContext): Promise { - const results = await super.registerProjectOutputDirs(ctx) - const {rules} = ctx.collectedInputContext - if (rules == null || rules.length === 0) return results - for (const project of ctx.collectedInputContext.workspace.projects) { - if (project.dirFromWorkspacePath == null) continue - const projectRules = applySubSeriesGlobPrefix( - filterRulesByProjectConfig(rules, project.projectConfig), - project.projectConfig - ) - if (projectRules.length === 0) continue - const dirPath = path.join(project.dirFromWorkspacePath.path, this.globalConfigDir, RULES_SUBDIR) - results.push(this.createRelativePath(dirPath, project.dirFromWorkspacePath.basePath, () => RULES_SUBDIR)) - } - return results - } - - override async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results = await super.registerProjectOutputFiles(ctx) - const {rules} = ctx.collectedInputContext - 
if (rules == null || rules.length === 0) return results - for (const project of ctx.collectedInputContext.workspace.projects) { - if (project.dirFromWorkspacePath == null) continue - const projectRules = applySubSeriesGlobPrefix( - filterRulesByProjectConfig(rules, project.projectConfig), - project.projectConfig - ) - for (const rule of projectRules) { - const filePath = path.join(project.dirFromWorkspacePath.path, this.globalConfigDir, RULES_SUBDIR, this.buildRuleFileName(rule)) - results.push(this.createRelativePath(filePath, project.dirFromWorkspacePath.basePath, () => RULES_SUBDIR)) - } - } - return results - } - - override async canWrite(ctx: OutputWriteContext): Promise { - if ((ctx.collectedInputContext.rules?.length ?? 0) > 0) return true - return super.canWrite(ctx) - } - - override async writeGlobalOutputs(ctx: OutputWriteContext): Promise { - return super.writeGlobalOutputs(ctx) - } - - override async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const results = await super.writeProjectOutputs(ctx) - const {rules} = ctx.collectedInputContext - if (rules == null || rules.length === 0) return results - const ruleResults = [] - for (const project of ctx.collectedInputContext.workspace.projects) { - if (project.dirFromWorkspacePath == null) continue - const projectRules = applySubSeriesGlobPrefix( - filterRulesByProjectConfig(rules, project.projectConfig), - project.projectConfig - ) - if (projectRules.length === 0) continue - const rulesDir = path.join(project.dirFromWorkspacePath.basePath, project.dirFromWorkspacePath.path, this.globalConfigDir, RULES_SUBDIR) - for (const rule of projectRules) ruleResults.push(await this.writeFile(ctx, path.join(rulesDir, this.buildRuleFileName(rule)), this.buildRuleContent(rule), 'rule')) - } - return {files: [...results.files, ...ruleResults], dirs: results.dirs} - } -} diff --git a/cli/src/plugins/plugin-core.ts b/cli/src/plugins/plugin-core.ts new file mode 100644 index 00000000..862d27cf --- /dev/null +++ 
b/cli/src/plugins/plugin-core.ts @@ -0,0 +1,117 @@ +export { + AbstractInputPlugin +} from './plugin-core/AbstractInputPlugin' + +export { + AbstractOutputPlugin +} from './plugin-core/AbstractOutputPlugin' + +export type { + AbstractOutputPluginOptions, + CleanupScopePathsConfig, + CombineOptions, + CommandNameTransformOptions, + CommandOutputConfig, + OutputCleanupConfig, + RuleContentOptions, + RuleOutputConfig, + SkillFrontMatterOptions, + SkillsOutputConfig, + SubAgentNameTransformOptions, + SubAgentsOutputConfig +} from './plugin-core/AbstractOutputPlugin' + +export { + AbstractPlugin +} from './plugin-core/AbstractPlugin' + +export { + DEFAULT_USER_CONFIG, + FileExtensions, + FrontMatterFields, + GlobalConfigDirs, + hasSourcePromptExtension, + IgnoreFiles, + OutputFileNames, + OutputPrefixes, + OutputSubdirectories, + PathPlaceholders, + PLUGIN_NAMES, + PreservedSkills, + SourceLocaleExtensions, + SourcePromptExtensions, + SourcePromptFileExtensions, + ToolPresets +} from './plugin-core/constants' + +export type { + PluginName +} from './plugin-core/constants' + +export { + applySubSeriesGlobPrefix, + filterByProjectConfig, + findAllGitRepos, + findGitModuleInfoDirs, + resolveGitInfoDir +} from './plugin-core/filters' + +export type { + FilterConfigPath, + SeriesFilterable +} from './plugin-core/filters' + +export { + GlobalScopeCollector +} from './plugin-core/GlobalScopeCollector' + +export type { + GlobalScopeCollectorOptions, + ScopeRegistration +} from './plugin-core/GlobalScopeCollector' + +export { + ScopePriority, + ScopeRegistry +} from './plugin-core/GlobalScopeCollector' + +export { + createLocalizedPromptReader, + LocalizedPromptReader +} from './plugin-core/LocalizedPromptReader' + +export { + McpConfigManager, + transformMcpConfigForCursor, + transformMcpConfigForOpencode +} from './plugin-core/McpConfigManager' + +export type { + McpConfigFormat, + McpConfigTransformer, + McpServerEntry, + McpWriteResult, + TransformedMcpConfig +} from 
'./plugin-core/McpConfigManager' + +export { + RegistryWriter +} from './plugin-core/RegistryWriter' + +export { + DEFAULT_SCOPE_PRIORITY, + resolveTopicScopes +} from './plugin-core/scopePolicy' + +export * from './plugin-core/types' + +export { + createLogger, + getGlobalLogLevel, + setGlobalLogLevel +} from '@truenine/logger' + +export type { + ILogger, + LogLevel +} from '@truenine/logger' diff --git a/cli/src/plugins/plugin-input-shared/AbstractInputPlugin.ts b/cli/src/plugins/plugin-core/AbstractInputPlugin.ts similarity index 79% rename from cli/src/plugins/plugin-input-shared/AbstractInputPlugin.ts rename to cli/src/plugins/plugin-core/AbstractInputPlugin.ts index 6619d5c4..4424abef 100644 --- a/cli/src/plugins/plugin-input-shared/AbstractInputPlugin.ts +++ b/cli/src/plugins/plugin-core/AbstractInputPlugin.ts @@ -1,6 +1,6 @@ import type {ParsedMarkdown} from '@truenine/md-compiler/markdown' import type { - CollectedInputContext, + InputCollectedContext, InputEffectContext, InputEffectHandler, InputEffectRegistration, @@ -11,17 +11,16 @@ import type { PluginScopeRegistration, ResolvedBasePaths, YAMLFrontMatter -} from '../plugin-shared' +} from './types' import {spawn} from 'node:child_process' import * as os from 'node:os' import * as path from 'node:path' import {parseMarkdown} from '@truenine/md-compiler/markdown' -import { - AbstractPlugin, - PathPlaceholders, - PluginKind -} from '../plugin-shared' +import {logProtectedDeletionGuardError, ProtectedDeletionGuardError} from '@/ProtectedDeletionGuard' +import {AbstractPlugin} from './AbstractPlugin' +import {PathPlaceholders} from './constants' +import {PluginKind} from './enums' export abstract class AbstractInputPlugin extends AbstractPlugin implements InputPlugin { private readonly inputEffects: InputEffectRegistration[] = [] @@ -74,21 +73,31 @@ export abstract class AbstractInputPlugin extends AbstractPlugin 0 } @@ -111,7 +120,7 @@ export abstract class AbstractInputPlugin extends AbstractPlugin | 
Promise> + abstract collect(ctx: InputPluginContext): Partial | Promise> protected resolveBasePaths(options: Required): ResolvedBasePaths { const workspaceDirRaw = options.workspaceDir diff --git a/cli/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts b/cli/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts new file mode 100644 index 00000000..1487a29c --- /dev/null +++ b/cli/src/plugins/plugin-core/AbstractOutputPlugin.subagents.test.ts @@ -0,0 +1,113 @@ +import type {OutputWriteContext, SubAgentPrompt} from './types' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {createLogger, FilePathKind, PromptKind} from '../plugin-core' +import {AbstractOutputPlugin} from './AbstractOutputPlugin' + +class TestSubAgentOutputPlugin extends AbstractOutputPlugin { + constructor(options?: ConstructorParameters[1]) { + super('TestSubAgentOutputPlugin', { + globalConfigDir: '.tool', + outputFileName: '', + subagents: { + sourceScopes: ['project'], + ...options?.subagents + } + }) + } +} + +function createSubAgentPrompt(): SubAgentPrompt { + return { + type: PromptKind.SubAgent, + content: 'subagent content', + length: 16, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: 'qa/boot.mdx', + basePath: path.resolve('tmp/dist/subagents'), + getDirectoryName: () => 'boot', + getAbsolutePath: () => path.resolve('tmp/dist/subagents/qa/boot.mdx') + }, + agentPrefix: 'qa', + agentName: 'boot', + yamlFrontMatter: { + namingCase: 'kebabCase', + description: 'subagent desc' + }, + markdownContents: [] + } as SubAgentPrompt +} + +function createWriteContext(subAgents: readonly SubAgentPrompt[]): OutputWriteContext { + const workspaceBase = path.resolve('tmp/workspace') + return { + logger: createLogger('TestSubAgentOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: 
FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => 'workspace' + }, + projects: [{ + name: 'demo', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'demo', + basePath: workspaceBase, + getDirectoryName: () => 'demo', + getAbsolutePath: () => path.join(workspaceBase, 'demo') + } + }] + }, + subAgents + } + } as OutputWriteContext +} + +describe('abstract output plugin subagent naming', () => { + it('uses prefix-agent.ext by default', async () => { + const plugin = new TestSubAgentOutputPlugin() + const declarations = await plugin.declareOutputFiles(createWriteContext([createSubAgentPrompt()])) + const [declaration] = declarations + + expect(declaration?.path.endsWith(path.join('.tool', 'agents', 'qa-boot.md'))).toBe(true) + }) + + it('supports custom linkSymbol and ext for subagent output names', async () => { + const plugin = new TestSubAgentOutputPlugin({ + subagents: { + sourceScopes: ['project'], + linkSymbol: '_', + ext: '.markdown' + } + }) + const declarations = await plugin.declareOutputFiles(createWriteContext([createSubAgentPrompt()])) + const [declaration] = declarations + + expect(declaration?.path.endsWith(path.join('.tool', 'agents', 'qa_boot.markdown'))).toBe(true) + }) + + it('supports subagents.transformFrontMatter declaratively', async () => { + const plugin = new TestSubAgentOutputPlugin({ + subagents: { + sourceScopes: ['project'], + transformFrontMatter: () => ({role: 'qa'}) + } + }) + const declarations = await plugin.declareOutputFiles(createWriteContext([createSubAgentPrompt()])) + const [declaration] = declarations + if (declaration == null) throw new Error('Expected one subagent declaration') + + const content = await plugin.convertContent(declaration, createWriteContext([createSubAgentPrompt()])) + expect(String(content)).toContain('role:') + expect(String(content)).toContain('subagent content') + }) +}) diff --git a/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts 
b/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts new file mode 100644 index 00000000..ba817eee --- /dev/null +++ b/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts @@ -0,0 +1,1175 @@ +import type {RegistryWriter} from './RegistryWriter' +import type {CommandPrompt, CommandSeriesPluginOverride, ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputCleanupPathDeclaration, OutputCleanupScope, OutputDeclarationScope, OutputFileDeclaration, OutputPlugin, OutputPluginCapabilities, OutputPluginContext, OutputScopeSelection, OutputScopeTopic, OutputTopicCapability, OutputWriteContext, Path, ProjectConfig, RegistryData, RegistryOperationResult, RulePrompt, RuleScope, SkillPrompt, SubAgentPrompt} from './types' + +import {Buffer} from 'node:buffer' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import process from 'node:process' +import {mdxToMd} from '@truenine/md-compiler' +import {buildMarkdownWithFrontMatter, buildMarkdownWithRawFrontMatter} from '@truenine/md-compiler/markdown' +import {AbstractPlugin} from './AbstractPlugin' +import {FilePathKind, PluginKind} from './enums' +import { + applySubSeriesGlobPrefix, + filterByProjectConfig +} from './filters' +import {GlobalScopeCollector} from './GlobalScopeCollector' +import {resolveTopicScopes} from './scopePolicy' +import {OUTPUT_SCOPE_TOPICS} from './types' + +interface ScopedSourceConfig { + /** Allowed source scopes for the topic */ + readonly sourceScopes?: readonly OutputDeclarationScope[] +} + +/** + * Options for building skill front matter + */ +export interface SkillFrontMatterOptions { + readonly includeTools?: boolean + readonly toolFormat?: 'array' | 'string' + readonly additionalFields?: Record +} + +/** + * Options for building rule content + */ +export interface RuleContentOptions { + readonly fileExtension: '.mdc' | '.md' + readonly alwaysApply: boolean + readonly globJoinPattern: ', ' | '|' | string + readonly frontMatterFormatter?: 
(globs: string) => unknown + readonly additionalFrontMatter?: Record +} + +/** + * Rule output configuration (declarative) + */ +export interface RuleOutputConfig { + /** Rules subdirectory, default 'rules' */ + readonly subDir?: string + /** Link symbol between series and ruleName, default '-' */ + readonly linkSymbol?: string + /** Rule file prefix, default 'rule' */ + readonly prefix?: string + /** Rule file extension, default '.md' */ + readonly ext?: string + /** Custom frontmatter transformer */ + readonly transformFrontMatter?: (rule: RulePrompt) => Record + /** Allowed rule source scopes, default ['project', 'workspace', 'global'] */ + readonly sourceScopes?: readonly OutputDeclarationScope[] +} + +/** + * Command output configuration (declarative) + */ +export interface CommandOutputConfig { + /** Commands subdirectory, default 'commands' */ + readonly subDir?: string + /** Custom command frontmatter transformer */ + readonly transformFrontMatter?: (cmd: CommandPrompt, context: { + readonly sourceFrontMatter?: Record + readonly isRecompiled: boolean + }) => Record + /** Allowed command source scopes, default ['project', 'workspace', 'global'] */ + readonly sourceScopes?: readonly OutputDeclarationScope[] +} + +/** + * SubAgent output configuration (declarative) + */ +export interface SubAgentsOutputConfig extends ScopedSourceConfig { + /** SubAgents subdirectory, default 'agents' */ + readonly subDir?: string + /** Whether to include input-derived prefix in output filename, default true */ + readonly includePrefix?: boolean + /** Separator between prefix and agent name, default '-' */ + readonly linkSymbol?: string + /** SubAgent file extension, default '.md' */ + readonly ext?: string + /** Optional frontmatter transformer */ + readonly transformFrontMatter?: (subAgent: SubAgentPrompt, context: { + readonly sourceFrontMatter?: Record + }) => Record +} + +/** + * Skills output configuration (declarative) + */ +export interface SkillsOutputConfig extends 
ScopedSourceConfig { + /** Skills subdirectory, default 'skills' */ + readonly subDir?: string +} + +/** + * Options for transforming command names in output filenames. + * Used by transformCommandName method to control prefix handling. + */ +export interface CommandNameTransformOptions { + readonly includeSeriesPrefix?: boolean + readonly seriesSeparator?: string +} + +/** + * Options for transforming subagent names in output filenames. + */ +export interface SubAgentNameTransformOptions { + readonly includePrefix?: boolean + readonly linkSymbol?: string + readonly ext?: string +} + +/** + * Cleanup path entries for one scope. + * Relative paths are resolved by scope base: + * - project: project root + * - workspace: workspace root + * - global: user home + * - xdgConfig: XDG config home (defaults to ~/.config) + */ +export interface CleanupScopePathsConfig { + readonly files?: readonly string[] + readonly dirs?: readonly string[] + readonly globs?: readonly string[] +} + +/** + * Declarative cleanup configuration for output plugins. + */ +export interface OutputCleanupConfig { + readonly delete?: Partial> + readonly protect?: Partial> + readonly excludeScanGlobs?: readonly string[] +} + +/** + * Options for configuring AbstractOutputPlugin subclasses. 
+ */ +export interface AbstractOutputPluginOptions { + globalConfigDir?: string + + outputFileName?: string + + dependsOn?: readonly string[] + + indexignore?: string + + /** Command output configuration (declarative) */ + commands?: CommandOutputConfig + + /** SubAgent output configuration (declarative) */ + subagents?: SubAgentsOutputConfig + + /** Skills output configuration (declarative) */ + skills?: SkillsOutputConfig + + toolPreset?: string + + /** Rule output configuration (declarative) */ + rules?: RuleOutputConfig + + /** Cleanup configuration (declarative) */ + cleanup?: OutputCleanupConfig + + /** Explicit output capability matrix for scope override validation */ + capabilities?: OutputPluginCapabilities +} + +/** + * Options for combining global content with project content. + */ +export interface CombineOptions { + separator?: string + + skipIfEmpty?: boolean + + position?: 'before' | 'after' +} + +type DeclarativeOutputSource + = | {readonly kind: 'projectRootMemory', readonly content: string} + | {readonly kind: 'projectChildMemory', readonly content: string} + | {readonly kind: 'globalMemory', readonly content: string} + | {readonly kind: 'command', readonly command: CommandPrompt} + | {readonly kind: 'subAgent', readonly subAgent: SubAgentPrompt} + | {readonly kind: 'skillMain', readonly skill: SkillPrompt} + | {readonly kind: 'skillReference', readonly content: string} + | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | {readonly kind: 'rule', readonly rule: RulePrompt} + | {readonly kind: 'ignoreFile', readonly content: string} + +export abstract class AbstractOutputPlugin extends AbstractPlugin implements OutputPlugin { + readonly declarativeOutput = true as const + + readonly outputCapabilities: OutputPluginCapabilities + + protected readonly globalConfigDir: string + + protected readonly outputFileName: string + + protected readonly indexignore: string | undefined + + protected readonly 
commandsConfig: { + readonly subDir: string + readonly transformFrontMatter?: (cmd: CommandPrompt, context: { + readonly sourceFrontMatter?: Record + readonly isRecompiled: boolean + }) => Record + readonly sourceScopes: readonly OutputDeclarationScope[] + } + + protected readonly subAgentsConfig: { + readonly subDir: string + readonly sourceScopes: readonly OutputDeclarationScope[] + readonly includePrefix: boolean + readonly linkSymbol: string + readonly ext: string + readonly transformFrontMatter?: (subAgent: SubAgentPrompt, context: { + readonly sourceFrontMatter?: Record + }) => Record + } + + protected readonly commandOutputEnabled: boolean + + protected readonly subAgentOutputEnabled: boolean + + protected readonly skillsConfig: { + readonly subDir: string + readonly sourceScopes: readonly OutputDeclarationScope[] + } + + protected readonly skillOutputEnabled: boolean + + protected readonly toolPreset: string | undefined + + /** Rule output configuration */ + protected readonly rulesConfig: RuleOutputConfig + + protected readonly ruleOutputEnabled: boolean + + protected readonly cleanupConfig: OutputCleanupConfig + + private readonly registryWriterCache: Map> = new Map() + + protected constructor(name: string, options?: AbstractOutputPluginOptions) { + super(name, PluginKind.Output, options?.dependsOn) + this.globalConfigDir = options?.globalConfigDir ?? '' + this.outputFileName = options?.outputFileName ?? '' + this.indexignore = options?.indexignore + + const commandFrontMatterTransformer = options?.commands?.transformFrontMatter + this.commandOutputEnabled = options?.commands != null + this.commandsConfig = { + subDir: options?.commands?.subDir ?? 'commands', + sourceScopes: options?.commands?.sourceScopes ?? 
['project', 'workspace', 'global'], + ...commandFrontMatterTransformer != null && {transformFrontMatter: commandFrontMatterTransformer} + } // Initialize command output config with defaults + this.subAgentOutputEnabled = options?.subagents != null + this.subAgentsConfig = { + subDir: options?.subagents?.subDir ?? 'agents', + sourceScopes: options?.subagents?.sourceScopes ?? ['project', 'workspace', 'global'], + includePrefix: options?.subagents?.includePrefix ?? true, + linkSymbol: options?.subagents?.linkSymbol ?? '-', + ext: options?.subagents?.ext ?? '.md', + ...options?.subagents?.transformFrontMatter != null && {transformFrontMatter: options.subagents.transformFrontMatter} + } // Initialize subAgent output config with defaults + this.skillOutputEnabled = options?.skills != null + this.skillsConfig = { + subDir: options?.skills?.subDir ?? 'skills', + sourceScopes: options?.skills?.sourceScopes ?? ['project', 'workspace', 'global'] + } + this.toolPreset = options?.toolPreset + + this.ruleOutputEnabled = options?.rules != null + this.rulesConfig = { + ...options?.rules, + sourceScopes: options?.rules?.sourceScopes ?? ['project', 'workspace', 'global'] + } // Initialize rule output config with defaults + this.cleanupConfig = options?.cleanup ?? {} + + this.outputCapabilities = options?.capabilities != null + ? this.normalizeCapabilities(options.capabilities) + : this.buildInferredCapabilities() + } + + private buildInferredCapabilities(): OutputPluginCapabilities { + const capabilities: OutputPluginCapabilities = {} + + if (this.outputFileName.length > 0) { + capabilities.prompt = { + scopes: ['project', 'global'], + singleScope: false + } + } + + if (this.ruleOutputEnabled) { + capabilities.rules = { + scopes: this.rulesConfig.sourceScopes ?? 
['project', 'workspace', 'global'], + singleScope: false + } + } + + if (this.commandOutputEnabled) { + capabilities.commands = { + scopes: this.commandsConfig.sourceScopes, + singleScope: true + } + } + + if (this.subAgentOutputEnabled) { + capabilities.subagents = { + scopes: this.subAgentsConfig.sourceScopes, + singleScope: true + } + } + + if (this.skillOutputEnabled) { + capabilities.skills = { + scopes: this.skillsConfig.sourceScopes, + singleScope: true + } + } + + return capabilities + } + + private normalizeCapabilities( + capabilities: OutputPluginCapabilities + ): OutputPluginCapabilities { + const normalizedCapabilities: OutputPluginCapabilities = {} + for (const topic of OUTPUT_SCOPE_TOPICS) { + const capability = capabilities[topic] + if (capability == null) continue + + const normalized = this.normalizeCapability(capability) + if (normalized != null) normalizedCapabilities[topic] = normalized + } + return normalizedCapabilities + } + + private normalizeCapability( + capability: OutputTopicCapability + ): OutputTopicCapability | undefined { + const uniqueScopes: OutputDeclarationScope[] = [] + for (const scope of capability.scopes) { + if (!uniqueScopes.includes(scope)) uniqueScopes.push(scope) + } + if (uniqueScopes.length === 0) return void 0 + return { + scopes: uniqueScopes, + singleScope: capability.singleScope + } + } + + protected resolvePromptSourceProjectConfig(ctx: OutputPluginContext | OutputWriteContext): ProjectConfig | undefined { + const {projects} = ctx.collectedOutputContext.workspace + const promptSource = projects.find(p => p.isPromptSourceProject === true) + return promptSource?.projectConfig ?? 
projects[0]?.projectConfig + } + + protected isRelativePath(p: Path): boolean { + return p.pathKind === FilePathKind.Relative + } + + protected toRelativePath(p: Path): string { + return p.path + } + + protected resolveFullPath(targetPath: Path, outputFileName?: string): string { + const dirPath = this.resolveDirectoryPath(targetPath) + + const fileName = outputFileName ?? this.outputFileName // Append the output file name if provided or if default is set + if (fileName) return path.join(dirPath, fileName) + return dirPath + } + + protected resolveDirectoryPath(targetPath: Path): string { + if (targetPath.pathKind === FilePathKind.Absolute) return targetPath.path + if ('basePath' in targetPath) return path.resolve(targetPath.basePath as string, targetPath.path) + return path.resolve(process.cwd(), targetPath.path) + } + + protected getWorkspaceConfigDir(ctx: OutputWriteContext): string { + const workspaceDir = this.resolveDirectoryPath(ctx.collectedOutputContext.workspace.directory) + return path.join(workspaceDir, this.globalConfigDir) + } + + protected createRelativePath( + pathStr: string, + basePath: string, + _dirNameFn: () => string + ): string { + return path.join(basePath, pathStr) + } + + protected createFileRelativePath(dir: string, fileName: string): string { + return path.join(dir, fileName) + } + + protected getGlobalConfigDir(): string { + return path.join(this.getHomeDir(), this.globalConfigDir) + } + + protected getXdgConfigHomeDir(): string { + const xdgConfigHome = process.env['XDG_CONFIG_HOME'] + if (typeof xdgConfigHome === 'string' && xdgConfigHome.trim().length > 0) return xdgConfigHome + return path.join(this.getHomeDir(), '.config') + } + + protected getHomeDir(): string { + return os.homedir() + } + + protected joinPath(...segments: string[]): string { + return path.join(...segments) + } + + protected resolvePath(...segments: string[]): string { + return path.resolve(...segments) + } + + protected dirname(p: string): string { + return 
path.dirname(p) + } + + protected basename(p: string, ext?: string): string { + return path.basename(p, ext) + } + + protected existsSync(p: string): boolean { + return fs.existsSync(p) + } + + protected lstatSync(p: string): fs.Stats { + return fs.lstatSync(p) + } + + protected readdirSync(dir: string, options: {withFileTypes: true}): fs.Dirent[] + protected readdirSync(dir: string): string[] + protected readdirSync(dir: string, options?: {withFileTypes?: boolean}): fs.Dirent[] | string[] { + if (options?.withFileTypes === true) return fs.readdirSync(dir, {withFileTypes: true}) + return fs.readdirSync(dir) + } + + protected getIgnoreOutputPath(): string | undefined { + if (this.indexignore == null) return void 0 + return this.indexignore + } + + private resolveCleanupScopeBasePaths( + scope: OutputCleanupScope, + ctx: OutputCleanContext + ): readonly string[] { + if (scope === 'global') return [this.getHomeDir()] + if (scope === 'workspace') return [this.resolveDirectoryPath(ctx.collectedOutputContext.workspace.directory)] + if (scope === 'xdgConfig') return [this.getXdgConfigHomeDir()] + + const projectBasePaths: string[] = [] + for (const project of ctx.collectedOutputContext.workspace.projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + projectBasePaths.push(this.resolveDirectoryPath(projectDir)) + } + return projectBasePaths + } + + private resolveCleanupDeclaredPath(basePath: string, declaredPath: string): string { + if (path.isAbsolute(declaredPath)) return path.resolve(declaredPath) + if (declaredPath === '~') return this.getHomeDir() + if (declaredPath.startsWith('~/') || declaredPath.startsWith('~\\')) return path.resolve(this.getHomeDir(), declaredPath.slice(2)) + return path.resolve(basePath, declaredPath) + } + + private normalizeGlobPattern(rawPattern: string): string { + return rawPattern.replaceAll('\\', '/') + } + + private buildCleanupTargetsFromScopeConfig( + scopeConfig: Partial> | undefined, + kind: 
'delete' | 'protect', + ctx: OutputCleanContext + ): readonly OutputCleanupPathDeclaration[] { + if (scopeConfig == null) return [] + + const declarations: OutputCleanupPathDeclaration[] = [] + const scopes: readonly OutputCleanupScope[] = ['project', 'workspace', 'global', 'xdgConfig'] + + const pushTargets = ( + scope: OutputCleanupScope, + targetKind: 'file' | 'directory' | 'glob', + entries: readonly string[] | undefined + ): void => { + if (entries == null || entries.length === 0) return + const basePaths = this.resolveCleanupScopeBasePaths(scope, ctx) + + for (const entry of entries) { + for (const basePath of basePaths) { + const resolved = path.isAbsolute(entry) + ? path.resolve(entry) + : this.resolveCleanupDeclaredPath(basePath, entry) + + declarations.push({ + path: targetKind === 'glob' ? this.normalizeGlobPattern(resolved) : resolved, + kind: targetKind, + scope, + label: `${kind}.${scope}` + }) + } + } + } + + for (const scope of scopes) { + const entries = scopeConfig[scope] + if (entries == null) continue + pushTargets(scope, 'file', entries.files) + pushTargets(scope, 'directory', entries.dirs) + pushTargets(scope, 'glob', entries.globs) + } + + return declarations + } + + protected buildMarkdownContent(content: string, frontMatter?: Record): string { + return buildMarkdownWithFrontMatter(frontMatter, content) + } + + protected buildMarkdownContentWithRaw( + content: string, + frontMatter?: Record, + rawFrontMatter?: string + ): string { + if (frontMatter != null && Object.keys(frontMatter).length > 0) return buildMarkdownWithFrontMatter(frontMatter, content) // If we have parsed front matter, use it + + if (rawFrontMatter != null && rawFrontMatter.length > 0) return buildMarkdownWithRawFrontMatter(rawFrontMatter, content) // If we have raw front matter but parsing failed, use raw + + return content // No front matter + } + + protected extractGlobalMemoryContent(ctx: OutputWriteContext): string | undefined { + return 
ctx.collectedOutputContext.globalMemory?.content as string | undefined + } + + protected combineGlobalWithContent( + globalContent: string | undefined, + projectContent: string, + options?: CombineOptions + ): string { + const { + separator = '\n\n', + skipIfEmpty = true, + position = 'before' + } = options ?? {} + + if (skipIfEmpty && (globalContent == null || globalContent.trim().length === 0)) return projectContent // Skip if global content is undefined/null or empty/whitespace when skipIfEmpty is true + + const effectiveGlobalContent = globalContent ?? '' // If global content is null/undefined but skipIfEmpty is false, treat as empty string + + if (position === 'after') return `${projectContent}${separator}${effectiveGlobalContent}` // Combine based on position + + return `${effectiveGlobalContent}${separator}${projectContent}` // Default: 'before' + } + + protected transformCommandName( + cmd: CommandPrompt, + options?: CommandNameTransformOptions + ): string { + const {includeSeriesPrefix = true, seriesSeparator = '-'} = options ?? {} + + if (!includeSeriesPrefix || cmd.commandPrefix == null) return `${cmd.commandName}.md` // If prefix should not be included or prefix is not present, return just commandName + + return `${cmd.commandPrefix}${seriesSeparator}${cmd.commandName}.md` + } + + protected transformSubAgentName( + subAgent: SubAgentPrompt, + options?: SubAgentNameTransformOptions + ): string { + const includePrefix = options?.includePrefix ?? this.subAgentsConfig.includePrefix + const linkSymbol = options?.linkSymbol ?? this.subAgentsConfig.linkSymbol + const ext = options?.ext ?? this.subAgentsConfig.ext + const normalizedExt = ext.startsWith('.') ? 
ext : `.${ext}` + const hasPrefix = includePrefix && subAgent.agentPrefix != null && subAgent.agentPrefix.length > 0 + + if (!hasPrefix) return `${subAgent.agentName}${normalizedExt}` + return `${subAgent.agentPrefix}${linkSymbol}${subAgent.agentName}${normalizedExt}` + } + + protected getCommandSeriesOptions(ctx: OutputWriteContext): CommandSeriesPluginOverride { + const globalOptions = ctx.pluginOptions?.commandSeriesOptions + const pluginOverride = globalOptions?.pluginOverrides?.[this.name] + + const includeSeriesPrefix = pluginOverride?.includeSeriesPrefix ?? globalOptions?.includeSeriesPrefix // Only include properties that have defined values to satisfy exactOptionalPropertyTypes // Plugin-specific overrides take precedence over global settings + const seriesSeparator = pluginOverride?.seriesSeparator + + if (includeSeriesPrefix != null && seriesSeparator != null) return {includeSeriesPrefix, seriesSeparator} // Build result object conditionally to avoid assigning undefined to readonly properties + if (includeSeriesPrefix != null) return {includeSeriesPrefix} + if (seriesSeparator != null) return {seriesSeparator} + return {} + } + + protected getTransformOptionsFromContext( + ctx: OutputWriteContext, + additionalOptions?: CommandNameTransformOptions + ): CommandNameTransformOptions { + const seriesOptions = this.getCommandSeriesOptions(ctx) + + const includeSeriesPrefix = seriesOptions.includeSeriesPrefix ?? additionalOptions?.includeSeriesPrefix // Only include properties that have defined values to satisfy exactOptionalPropertyTypes // Merge: additionalOptions (plugin defaults) <- seriesOptions (config overrides) + const seriesSeparator = seriesOptions.seriesSeparator ?? 
additionalOptions?.seriesSeparator + + if (includeSeriesPrefix != null && seriesSeparator != null) return {includeSeriesPrefix, seriesSeparator} // Build result object conditionally to avoid assigning undefined to readonly properties + if (includeSeriesPrefix != null) return {includeSeriesPrefix} + if (seriesSeparator != null) return {seriesSeparator} + return {} + } + + protected shouldSkipDueToPlugin(ctx: OutputWriteContext, precedingPluginName: string): boolean { + const registeredPlugins = ctx.registeredPluginNames + if (registeredPlugins == null) return false + return registeredPlugins.includes(precedingPluginName) + } + + protected getRegistryWriter< + TEntry, + TRegistry extends RegistryData, + T extends RegistryWriter + >( + WriterClass: new (logger: ILogger) => T + ): T { + const cacheKey = WriterClass.name + + const cached = this.registryWriterCache.get(cacheKey) // Check cache first + if (cached != null) return cached as T + + const writer = new WriterClass(this.log) // Create new instance and cache it + this.registryWriterCache.set(cacheKey, writer as RegistryWriter) + return writer + } + + protected async registerInRegistry< + TEntry, + TRegistry extends RegistryData + >( + writer: RegistryWriter, + entries: readonly TEntry[], + ctx: OutputWriteContext + ): Promise { + return writer.register(entries, ctx.dryRun) + } + + protected normalizeRuleScope(rule: RulePrompt): RuleScope { + return rule.scope ?? 
'project' + } + + protected normalizeSourceScope(scope: RuleScope | undefined): OutputDeclarationScope { + if (scope === 'workspace' || scope === 'global' || scope === 'project') return scope + return 'project' + } + + protected resolveCommandSourceScope(cmd: CommandPrompt): OutputDeclarationScope { + if (cmd.globalOnly === true) return 'global' + const scope = (cmd.yamlFrontMatter as {scope?: RuleScope} | undefined)?.scope + return this.normalizeSourceScope(scope) + } + + protected resolveSubAgentSourceScope(subAgent: SubAgentPrompt): OutputDeclarationScope { + const scope = (subAgent.yamlFrontMatter as {scope?: RuleScope} | undefined)?.scope + return this.normalizeSourceScope(scope) + } + + protected resolveSkillSourceScope(skill: SkillPrompt): OutputDeclarationScope { + const scope = (skill.yamlFrontMatter as {scope?: RuleScope} | undefined)?.scope + return this.normalizeSourceScope(scope) + } + + protected selectSingleScopeItems( + items: readonly T[], + sourceScopes: readonly OutputDeclarationScope[], + resolveScope: (item: T) => OutputDeclarationScope, + requestedScopes?: OutputScopeSelection + ): {readonly selectedScope?: OutputDeclarationScope, readonly items: readonly T[]} { + if (items.length === 0) return {items: []} + + const availableScopes = [...new Set(items.map(resolveScope))] + const selectedScopes = resolveTopicScopes({ + requestedScopes, + defaultScopes: sourceScopes, + supportedScopes: sourceScopes, + singleScope: true, + availableScopes + }) + const [selectedScope] = selectedScopes + if (selectedScope == null) return {items: []} + + return { + selectedScope, + items: items.filter(item => resolveScope(item) === selectedScope) + } + } + + protected selectRuleScopes( + ctx: OutputWriteContext, + rules: readonly RulePrompt[] + ): readonly OutputDeclarationScope[] { + const availableScopes = [...new Set(rules.map(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule))))] + return resolveTopicScopes({ + requestedScopes: 
this.getTopicScopeOverride(ctx, 'rules'), + defaultScopes: this.rulesConfig.sourceScopes ?? ['project', 'workspace', 'global'], + supportedScopes: this.rulesConfig.sourceScopes ?? ['project', 'workspace', 'global'], + singleScope: false, + availableScopes + }) + } + + protected selectPromptScopes( + ctx: OutputWriteContext, + supportedScopes: readonly OutputDeclarationScope[] = ['project', 'global'], + defaultScopes: readonly OutputDeclarationScope[] = supportedScopes + ): readonly OutputDeclarationScope[] { + return resolveTopicScopes({ + requestedScopes: this.getTopicScopeOverride(ctx, 'prompt'), + defaultScopes, + supportedScopes, + singleScope: false + }) + } + + protected getTopicScopeOverride( + ctx: OutputPluginContext | OutputWriteContext, + topic: OutputScopeTopic + ): OutputScopeSelection | undefined { + return ctx.pluginOptions?.outputScopes?.plugins?.[this.name]?.[topic] + } + + protected buildSkillFrontMatter( + skill: SkillPrompt, + options?: SkillFrontMatterOptions + ): Record { + const fm = skill.yamlFrontMatter + const result: Record = { + name: fm.name, + description: fm.description + } + + if ('displayName' in fm && fm.displayName != null) { // Conditionally add optional fields + result['displayName'] = fm.displayName + } + if ('keywords' in fm && fm.keywords != null && fm.keywords.length > 0) result['keywords'] = fm.keywords + if ('author' in fm && fm.author != null) result['author'] = fm.author + if ('version' in fm && fm.version != null) result['version'] = fm.version + + const includeTools = options?.includeTools ?? true // Handle tools based on options + if (includeTools && 'allowTools' in fm && fm.allowTools != null && fm.allowTools.length > 0) { + const toolFormat = options?.toolFormat ?? 'array' + result['allowTools'] = toolFormat === 'string' ? 
fm.allowTools.join(',') : fm.allowTools + } + + if (options?.additionalFields != null) { // Add any additional custom fields + Object.assign(result, options.additionalFields) + } + + return result + } + + protected buildRuleContent(rule: RulePrompt): string { + const fmData = this.rulesConfig.transformFrontMatter + ? this.rulesConfig.transformFrontMatter(rule) + : {globs: rule.globs.join(', ')} + + const sanitizedFmData = fmData == null || Object.keys(fmData).length === 0 + ? void 0 + : fmData + + return buildMarkdownWithFrontMatter(sanitizedFmData, rule.content) + } + + protected buildRuleFileName(rule: RulePrompt): string { + const prefix = `${this.rulesConfig.prefix ?? 'rule'}${this.rulesConfig.linkSymbol ?? '-'}` + const fileName = `${prefix}${rule.prefix}${this.rulesConfig.linkSymbol ?? '-'}${rule.ruleName}${this.rulesConfig.ext ?? '.md'}` + this.log.trace('buildRuleFileName', { + plugin: this.name, + rulePrefix: rule.prefix, + ruleName: rule.ruleName, + prefix: this.rulesConfig.prefix ?? 'rule', + linkSymbol: this.rulesConfig.linkSymbol ?? '-', + ext: this.rulesConfig.ext ?? 
'.md', + result: fileName + }) + return fileName + } + + async declareOutputFiles(ctx: OutputWriteContext): Promise { + return this.buildDefaultOutputDeclarations(ctx) + } + + async declareCleanupPaths(ctx: OutputCleanContext): Promise { + const cleanupDelete = this.buildCleanupTargetsFromScopeConfig(this.cleanupConfig.delete, 'delete', ctx) + const cleanupProtect = this.buildCleanupTargetsFromScopeConfig(this.cleanupConfig.protect, 'protect', ctx) + const {excludeScanGlobs} = this.cleanupConfig + + if (cleanupDelete.length === 0 && cleanupProtect.length === 0 && (excludeScanGlobs == null || excludeScanGlobs.length === 0)) return {} + + return { + ...cleanupDelete.length > 0 && {delete: cleanupDelete}, + ...cleanupProtect.length > 0 && {protect: cleanupProtect}, + ...excludeScanGlobs != null && excludeScanGlobs.length > 0 && {excludeScanGlobs} + } + } + + async convertContent( + declaration: OutputFileDeclaration, + _ctx: OutputWriteContext + ): Promise { + const source = declaration.source as DeclarativeOutputSource + + switch (source.kind) { + case 'projectRootMemory': + case 'projectChildMemory': + case 'globalMemory': + case 'skillReference': + case 'ignoreFile': return source.content + case 'command': return this.buildCommandContent(source.command) + case 'subAgent': return this.buildSubAgentContent(source.subAgent) + case 'skillMain': return this.buildSkillMainContent(source.skill) + case 'skillResource': return source.encoding === 'base64' ? 
Buffer.from(source.content, 'base64') : source.content + case 'rule': return this.buildRuleContent(source.rule) + default: throw new Error(`Unsupported declaration source for plugin ${this.name}`) + } + } + + protected async buildDefaultOutputDeclarations(ctx: OutputWriteContext): Promise { + const declarations: OutputFileDeclaration[] = [] + const { + workspace, + globalMemory, + commands, + subAgents, + skills, + rules, + aiAgentIgnoreConfigFiles + } = ctx.collectedOutputContext + const transformOptions = this.getTransformOptionsFromContext(ctx) + const ignoreOutputPath = this.getIgnoreOutputPath() + const ignoreFile = this.indexignore == null + ? void 0 + : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + + const selectedCommands = this.commandOutputEnabled && commands != null + ? this.selectSingleScopeItems( + commands, + this.commandsConfig.sourceScopes, + cmd => this.resolveCommandSourceScope(cmd), + this.getTopicScopeOverride(ctx, 'commands') + ) + : {items: [] as readonly CommandPrompt[]} + + const selectedSubAgents = this.subAgentOutputEnabled && subAgents != null + ? this.selectSingleScopeItems( + subAgents, + this.subAgentsConfig.sourceScopes, + subAgent => this.resolveSubAgentSourceScope(subAgent), + this.getTopicScopeOverride(ctx, 'subagents') + ) + : {items: [] as readonly SubAgentPrompt[]} + + const selectedSkills = this.skillOutputEnabled && skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + + const allRules = rules ?? [] + const activeRuleScopes = this.ruleOutputEnabled && allRules.length > 0 + ? 
new Set(this.selectRuleScopes(ctx, allRules)) + : new Set() + const activePromptScopes = new Set(this.selectPromptScopes(ctx)) + + const rulesByScope: Record = { + project: [], + workspace: [], + global: [] + } + for (const rule of allRules) { + const ruleScope = this.normalizeSourceScope(this.normalizeRuleScope(rule)) + rulesByScope[ruleScope].push(rule) + } + + for (const project of workspace.projects) { + const projectDir = project.dirFromWorkspacePath + if (projectDir == null) continue + + if (this.outputFileName.length > 0 && activePromptScopes.has('project')) { + if (project.rootMemoryPrompt != null) { + declarations.push({ + path: this.resolveFullPath(projectDir), + scope: 'project', + source: {kind: 'projectRootMemory', content: project.rootMemoryPrompt.content as string} + }) + } + + if (project.childMemoryPrompts != null) { + for (const child of project.childMemoryPrompts) { + declarations.push({ + path: this.resolveFullPath(child.dir), + scope: 'project', + source: {kind: 'projectChildMemory', content: child.content as string} + }) + } + } + } + + const basePath = path.join(projectDir.basePath, projectDir.path, this.globalConfigDir) + const {projectConfig} = project + + if (selectedCommands.selectedScope === 'project' && selectedCommands.items.length > 0) { + const filteredCommands = filterByProjectConfig(selectedCommands.items, projectConfig, 'commands') + for (const cmd of filteredCommands) { + const fileName = this.transformCommandName(cmd, transformOptions) + declarations.push({ + path: path.join(basePath, this.commandsConfig.subDir, fileName), + scope: 'project', + source: {kind: 'command', command: cmd} + }) + } + } + + if (selectedSubAgents.selectedScope === 'project' && selectedSubAgents.items.length > 0) { + const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, projectConfig, 'subAgents') + for (const subAgent of filteredSubAgents) { + const fileName = this.transformSubAgentName(subAgent) + declarations.push({ + path: 
path.join(basePath, this.subAgentsConfig.subDir, fileName), + scope: 'project', + source: {kind: 'subAgent', subAgent} + }) + } + } + + if (selectedSkills.selectedScope === 'project' && selectedSkills.items.length > 0) { + const filteredSkills = filterByProjectConfig(selectedSkills.items, projectConfig, 'skills') + for (const skill of filteredSkills) { + const skillName = skill.yamlFrontMatter?.name ?? skill.dir.getDirectoryName() + const skillDir = path.join(basePath, this.skillsConfig.subDir, skillName) + + declarations.push({ + path: path.join(skillDir, 'SKILL.md'), + scope: 'project', + source: {kind: 'skillMain', skill} + }) + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, childDoc.dir.path.replace(/\.mdx$/, '.md')), + scope: 'project', + source: {kind: 'skillReference', content: childDoc.content as string} + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope: 'project', + source: {kind: 'skillResource', content: resource.content, encoding: resource.encoding} + }) + } + } + } + } + + if (activeRuleScopes.has('project')) { + const projectRules = applySubSeriesGlobPrefix( + filterByProjectConfig(rulesByScope.project, projectConfig, 'rules'), + projectConfig + ) + const rulesDir = path.join(basePath, this.rulesConfig.subDir ?? 
'rules') + for (const rule of projectRules) { + declarations.push({ + path: path.join(rulesDir, this.buildRuleFileName(rule)), + scope: 'project', + source: {kind: 'rule', rule} + }) + } + } + + if ( + ignoreOutputPath != null + && ignoreFile != null + && project.isPromptSourceProject !== true + ) { + declarations.push({ + path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + scope: 'project', + source: {kind: 'ignoreFile', content: ignoreFile.content} + }) + } + } + + const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) + const resolveScopedBasePath = (scope: OutputDeclarationScope): string => { + if (scope === 'global') return this.getGlobalConfigDir() + return this.getWorkspaceConfigDir(ctx) + } + + if ( + (selectedCommands.selectedScope === 'global' || selectedCommands.selectedScope === 'workspace') + && selectedCommands.items.length > 0 + ) { + const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') + const basePath = resolveScopedBasePath(selectedCommands.selectedScope) + for (const cmd of filteredCommands) { + const fileName = this.transformCommandName(cmd, transformOptions) + declarations.push({ + path: path.join(basePath, this.commandsConfig.subDir, fileName), + scope: selectedCommands.selectedScope, + source: {kind: 'command', command: cmd} + }) + } + } + + if ( + (selectedSubAgents.selectedScope === 'global' || selectedSubAgents.selectedScope === 'workspace') + && selectedSubAgents.items.length > 0 + ) { + const filteredSubAgents = filterByProjectConfig(selectedSubAgents.items, promptSourceProjectConfig, 'subAgents') + const basePath = resolveScopedBasePath(selectedSubAgents.selectedScope) + for (const subAgent of filteredSubAgents) { + const fileName = this.transformSubAgentName(subAgent) + declarations.push({ + path: path.join(basePath, this.subAgentsConfig.subDir, fileName), + scope: selectedSubAgents.selectedScope, + source: {kind: 'subAgent', subAgent} 
+ }) + } + } + + if ( + (selectedSkills.selectedScope === 'global' || selectedSkills.selectedScope === 'workspace') + && selectedSkills.items.length > 0 + ) { + const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') + const basePath = resolveScopedBasePath(selectedSkills.selectedScope) + for (const skill of filteredSkills) { + const skillName = skill.yamlFrontMatter?.name ?? skill.dir.getDirectoryName() + const skillDir = path.join(basePath, this.skillsConfig.subDir, skillName) + + declarations.push({ + path: path.join(skillDir, 'SKILL.md'), + scope: selectedSkills.selectedScope, + source: {kind: 'skillMain', skill} + }) + + if (skill.childDocs != null) { + for (const childDoc of skill.childDocs) { + declarations.push({ + path: path.join(skillDir, childDoc.dir.path.replace(/\.mdx$/, '.md')), + scope: selectedSkills.selectedScope, + source: {kind: 'skillReference', content: childDoc.content as string} + }) + } + } + + if (skill.resources != null) { + for (const resource of skill.resources) { + declarations.push({ + path: path.join(skillDir, resource.relativePath), + scope: selectedSkills.selectedScope, + source: {kind: 'skillResource', content: resource.content, encoding: resource.encoding} + }) + } + } + } + } + + for (const ruleScope of ['global', 'workspace'] as const) { + if (!activeRuleScopes.has(ruleScope)) continue + const basePath = resolveScopedBasePath(ruleScope) + const filteredRules = applySubSeriesGlobPrefix( + filterByProjectConfig(rulesByScope[ruleScope], promptSourceProjectConfig, 'rules'), + promptSourceProjectConfig + ) + const rulesDir = path.join(basePath, this.rulesConfig.subDir ?? 
'rules') + for (const rule of filteredRules) { + declarations.push({ + path: path.join(rulesDir, this.buildRuleFileName(rule)), + scope: ruleScope, + source: {kind: 'rule', rule} + }) + } + } + + if ( + globalMemory != null + && this.outputFileName.length > 0 + && activePromptScopes.has('global') + ) { + declarations.push({ + path: path.join(this.getGlobalConfigDir(), this.outputFileName), + scope: 'global', + source: {kind: 'globalMemory', content: globalMemory.content as string} + }) + } + + return declarations + } + + protected async buildCommandContent(cmd: CommandPrompt): Promise { + let compiledContent = cmd.content + let compiledFrontMatter = cmd.yamlFrontMatter + let useRecompiledFrontMatter = false + + if (cmd.rawMdxContent != null && this.toolPreset != null) { + this.log.debug('recompiling command with tool preset', { + file: cmd.dir.getAbsolutePath(), + toolPreset: this.toolPreset, + hasRawContent: true + }) + // eslint-disable-next-line ts/no-unsafe-assignment + const scopeCollector = new GlobalScopeCollector({toolPreset: this.toolPreset as any}) + const globalScope = scopeCollector.collect() + const result = await mdxToMd(cmd.rawMdxContent, {globalScope, extractMetadata: true, basePath: cmd.dir.basePath}) + compiledContent = result.content + compiledFrontMatter = result.metadata.fields as typeof cmd.yamlFrontMatter + useRecompiledFrontMatter = true + } + + const commandFrontMatterTransformer = this.commandsConfig.transformFrontMatter + if (commandFrontMatterTransformer == null) throw new Error(`commands.transformFrontMatter is required for command output plugin: ${this.name}`) + + const transformedFrontMatter = commandFrontMatterTransformer(cmd, { + isRecompiled: useRecompiledFrontMatter, + ...compiledFrontMatter != null && {sourceFrontMatter: compiledFrontMatter as Record} + }) + + return this.buildMarkdownContent(compiledContent, transformedFrontMatter) + } + + protected buildSubAgentContent(agent: SubAgentPrompt): string { + const 
subAgentFrontMatterTransformer = this.subAgentsConfig.transformFrontMatter + if (subAgentFrontMatterTransformer != null) { + const transformedFrontMatter = subAgentFrontMatterTransformer(agent, { + ...agent.yamlFrontMatter != null && {sourceFrontMatter: agent.yamlFrontMatter as Record} + }) + return this.buildMarkdownContent(agent.content, transformedFrontMatter) + } + + return this.buildMarkdownContentWithRaw( + agent.content, + agent.yamlFrontMatter, + agent.rawFrontMatter + ) + } + + protected buildSkillMainContent(skill: SkillPrompt): string { + return this.buildMarkdownContentWithRaw( + skill.content as string, + skill.yamlFrontMatter, + skill.rawFrontMatter + ) + } +} diff --git a/cli/src/plugins/plugin-shared/AbstractPlugin.ts b/cli/src/plugins/plugin-core/AbstractPlugin.ts similarity index 74% rename from cli/src/plugins/plugin-shared/AbstractPlugin.ts rename to cli/src/plugins/plugin-core/AbstractPlugin.ts index cd9f2b1c..24e2e323 100644 --- a/cli/src/plugins/plugin-shared/AbstractPlugin.ts +++ b/cli/src/plugins/plugin-core/AbstractPlugin.ts @@ -1,8 +1,8 @@ -import type {ILogger} from './log' -import type {PluginKind} from './types/Enums' -import type {Plugin} from './types/PluginTypes' +import type {ILogger} from '@truenine/logger' +import type {PluginKind} from './enums' +import type {Plugin} from './plugin' -import {createLogger} from './log' +import {createLogger} from '@truenine/logger' export abstract class AbstractPlugin implements Plugin { readonly type: T diff --git a/cli/src/plugins/plugin-shared/types/AindexTypes.ts b/cli/src/plugins/plugin-core/AindexTypes.ts similarity index 83% rename from cli/src/plugins/plugin-shared/types/AindexTypes.ts rename to cli/src/plugins/plugin-core/AindexTypes.ts index 6007f43a..0230ca74 100644 --- a/cli/src/plugins/plugin-shared/types/AindexTypes.ts +++ b/cli/src/plugins/plugin-core/AindexTypes.ts @@ -89,9 +89,9 @@ export const AINDEX_DIR_NAMES = { */ export const AINDEX_FILE_NAMES = { GLOBAL_MEMORY: 
'global.mdx', // Global memory
-  GLOBAL_MEMORY_SRC: 'global.cn.mdx',
+  GLOBAL_MEMORY_SRC: 'global.src.mdx',
   WORKSPACE_MEMORY: 'workspace.mdx', // Workspace memory
-  WORKSPACE_MEMORY_SRC: 'workspace.cn.mdx',
+  WORKSPACE_MEMORY_SRC: 'workspace.src.mdx',
   EDITOR_CONFIG: '.editorconfig', // EditorConfig
   IDEA_GITIGNORE: '.idea/.gitignore', // JetBrains IDE
   IDEA_PROJECT_XML: '.idea/codeStyles/Project.xml',
@@ -113,8 +113,8 @@ export const AINDEX_RELATIVE_PATHS = {
   SRC_COMMANDS: 'src/commands',
   SRC_AGENTS: 'src/agents',
   SRC_RULES: 'src/rules',
-  SRC_GLOBAL_MEMORY: 'app/global.cn.mdx',
-  SRC_WORKSPACE_MEMORY: 'app/workspace.cn.mdx',
+  SRC_GLOBAL_MEMORY: 'app/global.src.mdx',
+  SRC_WORKSPACE_MEMORY: 'app/workspace.src.mdx',
   DIST_SKILLS: 'dist/skills', // Distribution paths
   DIST_COMMANDS: 'dist/commands',
   DIST_AGENTS: 'dist/agents',
@@ -134,22 +134,22 @@ export const DEFAULT_AINDEX_STRUCTURE: AindexDirectory = {
   skills: {
     name: AINDEX_DIR_NAMES.SKILLS,
     required: false,
-    description: 'Skill source files (.cn.mdx)'
+    description: 'Skill source files (.src.mdx)'
   },
   commands: {
     name: AINDEX_DIR_NAMES.COMMANDS,
     required: false,
-    description: 'Fast command source files (.cn.mdx)'
+    description: 'Fast command source files (.src.mdx)'
   },
   agents: {
     name: AINDEX_DIR_NAMES.AGENTS,
     required: false,
-    description: 'Sub-agent source files (.cn.mdx)'
+    description: 'Sub-agent source files (.src.mdx)'
   },
   rules: {
     name: AINDEX_DIR_NAMES.RULES,
     required: false,
-    description: 'Rule source files (.cn.mdx)'
+    description: 'Rule source files (.src.mdx)'
   },
   globalMemoryFile: {
     name: AINDEX_FILE_NAMES.GLOBAL_MEMORY_SRC,
@@ -295,34 +295,4 @@ export type AindexFileName = (typeof AINDEX_FILE_NAMES)[keyof typeof AINDEX_FILE
 /**
  * Type for relative paths
  */
-export type AindexRelativePath = (typeof AINDEX_RELATIVE_PATHS)[keyof typeof AINDEX_RELATIVE_PATHS] // Backward compatibility aliases (deprecated, use Aindex* versions instead)
-
-/** @deprecated Use AindexFileEntry instead */
-export type ShadowSourceFileEntry = AindexFileEntry
-
-/** @deprecated Use AindexDirectoryEntry instead */
-export type ShadowSourceDirectoryEntry = AindexDirectoryEntry
-
-/** @deprecated Use AindexDirectory instead */
-export type ShadowSourceProjectDirectory = AindexDirectory
-
-/** @deprecated Use AindexDirName instead */
-export type ShadowSourceDirName = AindexDirName
-
-/** @deprecated Use AindexFileName instead */
-export type ShadowSourceFileName = AindexFileName
-
-/** @deprecated Use AindexRelativePath instead */
-export type ShadowSourceRelativePath = AindexRelativePath
-
-/** @deprecated Use AINDEX_DIR_NAMES instead */
-export const SHADOW_SOURCE_DIR_NAMES = AINDEX_DIR_NAMES
-
-/** @deprecated Use AINDEX_FILE_NAMES instead */
-export const SHADOW_SOURCE_FILE_NAMES = AINDEX_FILE_NAMES
-
-/** @deprecated Use AINDEX_RELATIVE_PATHS instead */
-export const SHADOW_SOURCE_RELATIVE_PATHS = AINDEX_RELATIVE_PATHS
-
-/** @deprecated Use DEFAULT_AINDEX_STRUCTURE instead */
-export const DEFAULT_SHADOW_SOURCE_PROJECT_STRUCTURE = DEFAULT_AINDEX_STRUCTURE
+export type AindexRelativePath = (typeof AINDEX_RELATIVE_PATHS)[keyof typeof AINDEX_RELATIVE_PATHS]
diff --git a/cli/src/plugins/plugin-core/ConfigTypes.schema.ts b/cli/src/plugins/plugin-core/ConfigTypes.schema.ts
new file mode 100644
index 00000000..a948a3f0
--- /dev/null
+++ b/cli/src/plugins/plugin-core/ConfigTypes.schema.ts
@@ -0,0 +1,166 @@
+import {z} from 'zod/v3'
+
+/**
+ * Zod schema for a source/dist path pair.
+ * Both paths are relative to the aindex project root.
+ */
+export const ZAindexDirPair = z.object({src: z.string(), dist: z.string()})
+
+/**
+ * Zod schema for the aindex configuration.
+ * All paths are relative to /.
+ */
+export const ZAindexConfig = z.object({
+  dir: z.string().default('aindex'),
+  skills: ZAindexDirPair,
+  commands: ZAindexDirPair,
+  subAgents: ZAindexDirPair,
+  rules: ZAindexDirPair,
+  globalPrompt: ZAindexDirPair,
+  workspacePrompt: ZAindexDirPair,
+  app: ZAindexDirPair,
+  ext: ZAindexDirPair,
+  arch: ZAindexDirPair
+})
+
+/**
+ * Zod schema for per-plugin command series override options.
+ */
+export const ZCommandSeriesPluginOverride = z.object({
+  includeSeriesPrefix: z.boolean().optional(),
+  seriesSeparator: z.string().optional()
+})
+
+/**
+ * Zod schema for command series configuration options.
+ */
+export const ZCommandSeriesOptions = z.object({
+  includeSeriesPrefix: z.boolean().optional(),
+  pluginOverrides: z.record(z.string(), ZCommandSeriesPluginOverride).optional()
+})
+
+/**
+ * Zod schema for output scope value.
+ */
+export const ZOutputScope = z.enum(['project', 'workspace', 'global'])
+
+/**
+ * Zod schema for selecting one or more scopes.
+ */
+export const ZOutputScopeSelection = z.union([ZOutputScope, z.array(ZOutputScope).min(1)])
+
+/**
+ * Zod schema for per-plugin topic scope overrides.
+ */
+export const ZPluginOutputScopeTopics = z.object({
+  prompt: ZOutputScopeSelection.optional(),
+  rules: ZOutputScopeSelection.optional(),
+  commands: ZOutputScopeSelection.optional(),
+  subagents: ZOutputScopeSelection.optional(),
+  skills: ZOutputScopeSelection.optional(),
+  mcp: ZOutputScopeSelection.optional()
+})
+
+/**
+ * Zod schema for output scope override configuration.
+ */ +export const ZOutputScopeOptions = z.object({plugins: z.record(z.string(), ZPluginOutputScopeTopics).optional()}) + +export const ZProtectionMode = z.enum(['direct', 'recursive']) +export const ZProtectionRuleMatcher = z.enum(['path', 'glob']) + +export const ZCleanupProtectionRule = z.object({ + path: z.string(), + protectionMode: ZProtectionMode, + matcher: ZProtectionRuleMatcher.optional(), + reason: z.string().optional() +}) + +export const ZCleanupProtectionOptions = z.object({rules: z.array(ZCleanupProtectionRule).optional()}) + +/** + * Zod schema for user profile information. + */ +export const ZUserProfile = z.object({ + name: z.string().optional(), + username: z.string().optional(), + gender: z.string().optional(), + birthday: z.string().optional() +}).catchall(z.unknown()) + +/** + * Zod schema for the user configuration file (.tnmsc.json). + */ +export const ZUserConfigFile = z.object({ + version: z.string().optional(), + workspaceDir: z.string().optional(), + aindex: ZAindexConfig.optional(), + logLevel: z.enum(['trace', 'debug', 'info', 'warn', 'error']).optional(), + commandSeriesOptions: ZCommandSeriesOptions.optional(), + outputScopes: ZOutputScopeOptions.optional(), + cleanupProtection: ZCleanupProtectionOptions.optional(), + profile: ZUserProfile.optional() +}) + +/** + * Zod schema for MCP project config. + */ +export const ZMcpProjectConfig = z.object({names: z.array(z.string()).optional()}) + +/** + * Zod schema for per-type series filtering configuration. + */ +export const ZTypeSeriesConfig = z.object({ + includeSeries: z.array(z.string()).optional(), + subSeries: z.record(z.string(), z.array(z.string())).optional() +}) + +/** + * Zod schema for project config. 
+ */ +export const ZProjectConfig = z.object({ + mcp: ZMcpProjectConfig.optional(), + includeSeries: z.array(z.string()).optional(), + subSeries: z.record(z.string(), z.array(z.string())).optional(), + rules: ZTypeSeriesConfig.optional(), + skills: ZTypeSeriesConfig.optional(), + subAgents: ZTypeSeriesConfig.optional(), + commands: ZTypeSeriesConfig.optional() +}) + +/** + * Zod schema for ConfigLoader options. + */ +export const ZConfigLoaderOptions = z.object({ + configFileName: z.string().optional(), + searchPaths: z.array(z.string()).optional(), + searchCwd: z.boolean().optional(), + searchGlobal: z.boolean().optional() +}) + +export type AindexDirPair = z.infer +export type AindexConfig = z.infer +export type CommandSeriesPluginOverride = z.infer +export type CommandSeriesOptions = z.infer +export type OutputScope = z.infer +export type OutputScopeSelection = z.infer +export type PluginOutputScopeTopics = z.infer +export type OutputScopeOptions = z.infer +export type ProtectionMode = z.infer +export type ProtectionRuleMatcher = z.infer +export type CleanupProtectionRule = z.infer +export type CleanupProtectionOptions = z.infer +export type UserConfigFile = z.infer +export type McpProjectConfig = z.infer +export type TypeSeriesConfig = z.infer +export type ProjectConfig = z.infer +export type ConfigLoaderOptions = z.infer + +/** + * Result of loading a config file. 
+ */ +export interface ConfigLoadResult { + readonly config: UserConfigFile + readonly source: string | null + readonly found: boolean +} diff --git a/cli/src/plugins/plugin-shared/types/ExportMetadataTypes.ts b/cli/src/plugins/plugin-core/ExportMetadataTypes.ts similarity index 99% rename from cli/src/plugins/plugin-shared/types/ExportMetadataTypes.ts rename to cli/src/plugins/plugin-core/ExportMetadataTypes.ts index 7d16353f..63e9c787 100644 --- a/cli/src/plugins/plugin-shared/types/ExportMetadataTypes.ts +++ b/cli/src/plugins/plugin-core/ExportMetadataTypes.ts @@ -6,7 +6,7 @@ * @module ExportMetadataTypes */ -import type {CodingAgentTools, NamingCaseKind, RuleScope} from './Enums' +import type {CodingAgentTools, NamingCaseKind, RuleScope} from './enums' import type {SeriName} from './PromptTypes' /** diff --git a/cli/src/plugins/plugin-input-shared/scope/GlobalScopeCollector.ts b/cli/src/plugins/plugin-core/GlobalScopeCollector.ts similarity index 50% rename from cli/src/plugins/plugin-input-shared/scope/GlobalScopeCollector.ts rename to cli/src/plugins/plugin-core/GlobalScopeCollector.ts index af3cce4c..45042e26 100644 --- a/cli/src/plugins/plugin-input-shared/scope/GlobalScopeCollector.ts +++ b/cli/src/plugins/plugin-core/GlobalScopeCollector.ts @@ -1,5 +1,6 @@ +import type {EvaluationScope} from '@truenine/md-compiler' import type {EnvironmentContext, MdComponent, MdxGlobalScope, OsInfo, ToolReferences, UserProfile} from '@truenine/md-compiler/globals' // Collects and manages global scope variables for MDX expression evaluation. 
// src/scope/GlobalScopeCollector.ts -import type {UserConfigFile} from '../../plugin-shared' +import type {UserConfigFile} from './types' import * as os from 'node:os' import process from 'node:process' import {OsKind, ShellKind, ToolPresets} from '@truenine/md-compiler/globals' @@ -115,3 +116,115 @@ export class GlobalScopeCollector { return mdComponent } } + +/** + * Represents a single scope registration + */ +export interface ScopeRegistration { + readonly namespace: string + readonly values: Record + readonly priority: number +} + +/** + * Priority levels for scope sources. + * Higher values take precedence over lower values during merge. + */ +export enum ScopePriority { + /** System default values (os, default tool) */ + SystemDefault = 0, + /** Values from configuration file (profile, custom tool) */ + UserConfig = 10, + /** Values registered by plugins */ + PluginRegistered = 20, + /** Values passed at MDX compile time */ + CompileTime = 30 +} + +/** + * Registry for managing and merging scopes from multiple sources. + * Handles priority-based resolution when the same key exists in multiple sources. + */ +export class ScopeRegistry { + private readonly registrations: ScopeRegistration[] = [] + private globalScope: MdxGlobalScope | null = null + + setGlobalScope(scope: MdxGlobalScope): void { + this.globalScope = scope + } + + getGlobalScope(): MdxGlobalScope | null { + return this.globalScope + } + + register( + namespace: string, + values: Record, + priority: ScopePriority = ScopePriority.PluginRegistered + ): void { + this.registrations.push({namespace, values, priority}) + } + + getRegistrations(): readonly ScopeRegistration[] { + return this.registrations + } + + merge(compileTimeScope?: EvaluationScope): EvaluationScope { + const result: EvaluationScope = {} + + if (this.globalScope != null) { // 1. 
First add global scope (lowest priority) + result['os'] = {...this.globalScope.os} + result['env'] = {...this.globalScope.env} + result['profile'] = {...this.globalScope.profile} + result['tool'] = {...this.globalScope.tool} + } + + const sorted = [...this.registrations].sort((a, b) => a.priority - b.priority) // 2. Sort by priority and merge registered scopes + for (const reg of sorted) result[reg.namespace] = this.deepMerge(result[reg.namespace] as Record | undefined, reg.values) + + if (compileTimeScope != null) { // 3. Finally merge compile-time scope (highest priority) + for (const [key, value] of Object.entries(compileTimeScope)) { + result[key] = typeof value === 'object' && value !== null && !Array.isArray(value) + ? this.deepMerge(result[key] as Record | undefined, value as Record) + : value + } + } + + return result + } + + private deepMerge( + target: Record | undefined, + source: Record + ): Record { + if (target == null) return {...source} + + const result = {...target} + for (const [key, value] of Object.entries(source)) { + result[key] = typeof value === 'object' + && value !== null + && !Array.isArray(value) + && typeof result[key] === 'object' + && result[key] !== null + && !Array.isArray(result[key]) + ? this.deepMerge(result[key] as Record, value as Record) + : value + } + return result + } + + resolve(expression: string): string { + const scope = this.merge() + return expression.replaceAll(/\$\{([^}]+)\}/g, (_, key: string) => { + const parts = key.split('.') + let value: unknown = scope + for (const part of parts) value = (value as Record)?.[part] + return value != null ? 
String(value) : `\${${key}}` + }) + } + + clear(): void { + this.registrations.length = 0 + this.globalScope = null + } +} diff --git a/cli/src/plugins/plugin-shared/types/InputTypes.ts b/cli/src/plugins/plugin-core/InputTypes.ts similarity index 53% rename from cli/src/plugins/plugin-shared/types/InputTypes.ts rename to cli/src/plugins/plugin-core/InputTypes.ts index b687c68e..f7e24205 100644 --- a/cli/src/plugins/plugin-shared/types/InputTypes.ts +++ b/cli/src/plugins/plugin-core/InputTypes.ts @@ -4,15 +4,16 @@ import type { IDEKind, PromptKind, RuleScope -} from './Enums' -import type {FileContent, Path, RelativePath} from './FileSystemTypes' -import type {LocalizedPrompt, PromptsContext} from './LocalizedTypes' +} from './enums' import type { CommandYAMLFrontMatter, + FileContent, GlobalMemoryPrompt, + Path, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt, Prompt, + RelativePath, RuleYAMLFrontMatter, SeriName, SkillYAMLFrontMatter, @@ -46,28 +47,21 @@ export interface ProjectIDEConfigFile exte export interface AIAgentIgnoreConfigFile { readonly fileName: string readonly content: string + readonly sourcePath?: string } /** - * All collected output information, provided to plugin system as input for output plugins + * Shared context fields across input aggregation and output execution. 
*/ -export interface CollectedInputContext { +interface CollectedContextData { readonly workspace: Workspace - readonly prompts?: PromptsContext // New unified prompts container with localization support - readonly promptIndex?: Map // Quick lookup index for all localized prompts - /** Legacy fields (deprecated, kept for backward compatibility) */ - /** @deprecated Use prompts.skills instead */ + /** Flat prompt projections used by current output plugins */ readonly skills?: readonly SkillPrompt[] - /** @deprecated Use prompts.commands instead */ readonly commands?: readonly CommandPrompt[] - /** @deprecated Use prompts.subAgents instead */ readonly subAgents?: readonly SubAgentPrompt[] - /** @deprecated Use prompts.rules instead */ readonly rules?: readonly RulePrompt[] - /** @deprecated Use prompts.readme instead */ readonly readmePrompts?: readonly ReadmePrompt[] - /** @deprecated Use prompts.globalMemory instead */ readonly globalMemory?: GlobalMemoryPrompt /** Other non-prompt fields */ @@ -80,12 +74,50 @@ export interface CollectedInputContext { readonly aindexDir?: string } +/** + * Input-side collected context. + * Built incrementally by input plugins through dependency-aware merging. + */ +export interface InputCollectedContext extends CollectedContextData {} + +/** + * Output-side collected context. + * Produced once from input context and consumed by output plugins only. + */ +export interface OutputCollectedContext extends CollectedContextData {} + +/** + * Convert input context to output context boundary object. + * This keeps input and output stages decoupled while preserving data shape. 
+ */ +export function toOutputCollectedContext(input: InputCollectedContext): OutputCollectedContext { + return { + workspace: { + directory: input.workspace.directory, + projects: [...input.workspace.projects] + }, + ...input.skills != null && {skills: [...input.skills]}, + ...input.commands != null && {commands: [...input.commands]}, + ...input.subAgents != null && {subAgents: [...input.subAgents]}, + ...input.rules != null && {rules: [...input.rules]}, + ...input.readmePrompts != null && {readmePrompts: [...input.readmePrompts]}, + ...input.globalMemory != null && {globalMemory: input.globalMemory}, + ...input.vscodeConfigFiles != null && {vscodeConfigFiles: [...input.vscodeConfigFiles]}, + ...input.jetbrainsConfigFiles != null && {jetbrainsConfigFiles: [...input.jetbrainsConfigFiles]}, + ...input.editorConfigFiles != null && {editorConfigFiles: [...input.editorConfigFiles]}, + ...input.aiAgentIgnoreConfigFiles != null && {aiAgentIgnoreConfigFiles: [...input.aiAgentIgnoreConfigFiles]}, + ...input.globalGitIgnore != null && {globalGitIgnore: input.globalGitIgnore}, + ...input.shadowGitExclude != null && {shadowGitExclude: input.shadowGitExclude}, + ...input.aindexDir != null && {aindexDir: input.aindexDir} + } +} + /** * Rule prompt with glob patterns for file-scoped rule application */ export interface RulePrompt extends Prompt { readonly type: PromptKind.Rule - readonly series: string + readonly prefix: string readonly ruleName: string readonly globs: readonly string[] readonly scope: RuleScope @@ -130,29 +162,6 @@ export interface SkillChildDoc extends Prompt { */ export type SkillResourceEncoding = 'text' | 'base64' -/** - * Resource category for classification - * - * Categories: - * - code: .kt, .java, .py, .ts, .js, .go, .rs, etc. - * - data: .sql, .json, .xml, .yaml, .csv, etc. - * - document: .txt, .rtf, .docx, .pdf, etc. - * - config: .ini, .conf, .properties, etc. - * - script: .sh, .bash, .ps1, .bat, etc. 
- * - image: .png, .jpg, .gif, .svg, .webp, etc. - * - binary: .exe, .dll, .so, .wasm, etc. - * - other: anything else - */ -export type SkillResourceCategory - = | 'code' - | 'data' - | 'document' - | 'config' - | 'script' - | 'image' - | 'binary' - | 'other' - /** * Skill resource file for AI on-demand access * Any non-.md file in skill directory or subdirectories @@ -171,204 +180,13 @@ export interface SkillResource { readonly extension: string readonly fileName: string readonly relativePath: string + readonly sourcePath?: string readonly content: string readonly encoding: SkillResourceEncoding - readonly category: SkillResourceCategory readonly length: number readonly mimeType?: string } -/** - * Text file extensions that should be read as UTF-8 - */ -export const SKILL_RESOURCE_TEXT_EXTENSIONS = [ - '.kt', // Code files - '.java', - '.py', - '.pyi', - '.pyx', - '.ts', - '.tsx', - '.js', - '.jsx', - '.mjs', - '.cjs', - '.go', - '.rs', - '.c', - '.cpp', - '.cc', - '.h', - '.hpp', - '.hxx', - '.cs', - '.fs', - '.fsx', - '.vb', - '.rb', - '.php', - '.swift', - '.scala', - '.groovy', - '.lua', - '.r', - '.R', - '.jl', - '.ex', - '.exs', - '.erl', - '.clj', - '.cljs', - '.hs', - '.ml', - '.mli', - '.nim', - '.zig', - '.v', - '.dart', - '.vue', - '.svelte', - '.sql', // Data files - '.json', - '.jsonc', - '.json5', - '.xml', - '.xsd', - '.xsl', - '.xslt', - '.yaml', - '.yml', - '.toml', - '.csv', - '.tsv', - '.graphql', - '.gql', - '.proto', - '.txt', // Document files - '.text', - '.rtf', - '.log', - '.ini', // Config files - '.conf', - '.cfg', - '.config', - '.properties', - '.env', - '.envrc', - '.editorconfig', - '.gitignore', - '.gitattributes', - '.npmrc', - '.nvmrc', - '.npmignore', - '.eslintrc', - '.prettierrc', - '.stylelintrc', - '.babelrc', - '.browserslistrc', - '.sh', // Script files - '.bash', - '.zsh', - '.fish', - '.ps1', - '.psm1', - '.psd1', - '.bat', - '.cmd', - '.html', // Web files - '.htm', - '.xhtml', - '.css', - '.scss', - '.sass', - '.less', 
- '.styl', - '.svg', - '.ejs', // Template files - '.hbs', - '.mustache', - '.pug', - '.jade', - '.jinja', - '.jinja2', - '.j2', - '.erb', - '.haml', - '.slim', - '.d.ts', // Declaration files - '.d.mts', - '.d.cts', - '.diff', // Other text formats - '.patch', - '.asm', - '.s', - '.makefile', - '.mk', - '.dockerfile', - '.tf', - '.tfvars', // Terraform - '.prisma', // Prisma - '.mdx' // MDX (but not .md which is handled separately) -] as const - -/** - * Binary file extensions that should be read as base64 - */ -export const SKILL_RESOURCE_BINARY_EXTENSIONS = [ - '.docx', // Documents - '.doc', - '.xlsx', - '.xls', - '.pptx', - '.ppt', - '.pdf', - '.odt', - '.ods', - '.odp', - '.png', // Images - '.jpg', - '.jpeg', - '.gif', - '.webp', - '.ico', - '.bmp', - '.tiff', - '.zip', // Archives - '.tar', - '.gz', - '.bz2', - '.7z', - '.rar', - '.pyd', // Compiled - '.pyc', - '.pyo', - '.class', - '.jar', - '.war', - '.dll', - '.so', - '.dylib', - '.exe', - '.bin', - '.wasm', - '.ttf', // Fonts - '.otf', - '.woff', - '.woff2', - '.eot', - '.mp3', // Audio/Video (usually not needed but for completeness) - '.wav', - '.ogg', - '.mp4', - '.webm', - '.db', // Database - '.sqlite', - '.sqlite3' -] as const - -export type SkillResourceTextExtension = typeof SKILL_RESOURCE_TEXT_EXTENSIONS[number] -export type SkillResourceBinaryExtension = typeof SKILL_RESOURCE_BINARY_EXTENSIONS[number] - /** * MCP server configuration entry */ @@ -429,3 +247,116 @@ export interface ReadmePrompt extends Prompt { readonly isRoot: boolean readonly fileKind: ReadmeFileKind } + +/** + * Supported locale codes + */ +export type Locale = 'zh' | 'en' + +export type LocalizedFileExtension = string | readonly string[] + +/** + * Localized content wrapper for a single locale + * Contains both compiled content and raw MDX source + */ +export interface LocalizedContent { + /** Compiled/processed content */ + readonly content: string + + /** Original MDX source (before compilation) */ + readonly rawMdx?: 
string + + /** Extracted front matter */ + readonly frontMatter?: Record + + /** File last modified timestamp */ + readonly lastModified: Date + + /** Full prompt object (optional, for extended access) */ + readonly prompt?: T + + /** Absolute file path */ + readonly filePath: string +} + +/** + * Source content container for all locales + */ +export interface LocalizedSource { + /** Default source content (.src.mdx) */ + readonly zh?: LocalizedContent + + /** English content (.mdx) */ + readonly en?: LocalizedContent + + /** Default locale content (typically zh) */ + readonly default: LocalizedContent + + /** Which locale is the default */ + readonly defaultLocale: Locale +} + +/** Universal localized prompt wrapper */ +export interface LocalizedPrompt { + readonly name: string // Prompt identifier name + readonly type: K // Prompt type kind + readonly src: LocalizedSource // Source files content (src directory) + readonly dist?: LocalizedContent // Compiled/dist content (dist directory, optional) + + /** Metadata flags */ + readonly metadata: { + readonly hasDist: boolean // Whether dist content exists + readonly hasMultipleLocales: boolean // Whether multiple locales exist in src + readonly isDirectoryStructure: boolean // Whether this is a directory-based prompt (like skills) + + /** Available child items (for directory structures) */ + readonly children?: string[] + } + + /** File paths for all variants */ + readonly paths: { + readonly zh?: string + readonly en?: string + readonly dist?: string + } +} + +/** + * Options for reading localized prompts from different structures + */ +export interface LocalizedReadOptions { + /** File extensions for each locale */ + readonly localeExtensions: { + readonly zh: LocalizedFileExtension + readonly en: LocalizedFileExtension + } + + /** Entry file name (without extension, e.g., 'skill' for skills) */ + readonly entryFileName?: string + + /** Create prompt from content */ + readonly createPrompt: (content: string, 
locale: Locale, name: string, metadata?: Record) => T | Promise + + /** Prompt kind */ + readonly kind: K + + /** Whether this is a directory-based structure */ + readonly isDirectoryStructure: boolean +} + +/** + * Result of reading a directory structure (like skills) + */ +export interface DirectoryReadResult { + readonly prompts: LocalizedPrompt[] + readonly errors: ReadError[] +} + +/** + * Error during reading + */ +export interface ReadError { + readonly path: string + readonly error: Error + readonly phase: 'scan' | 'read' | 'compile' +} diff --git a/cli/src/plugins/plugin-input-shared/LocalizedPromptReader.ts b/cli/src/plugins/plugin-core/LocalizedPromptReader.ts similarity index 65% rename from cli/src/plugins/plugin-input-shared/LocalizedPromptReader.ts rename to cli/src/plugins/plugin-core/LocalizedPromptReader.ts index 5aa391c7..4ade013e 100644 --- a/cli/src/plugins/plugin-input-shared/LocalizedPromptReader.ts +++ b/cli/src/plugins/plugin-core/LocalizedPromptReader.ts @@ -1,14 +1,16 @@ import type {MdxGlobalScope} from '@truenine/md-compiler/globals' import type { DirectoryReadResult, + ILogger, Locale, LocalizedContent, + LocalizedFileExtension, LocalizedPrompt, LocalizedReadOptions, Prompt, PromptKind, ReadError -} from '../plugin-shared' +} from './types' import {mdxToMd} from '@truenine/md-compiler' import {parseMarkdown} from '@truenine/md-compiler/markdown' // Re-export types for convenience @@ -25,7 +27,7 @@ export class LocalizedPromptReader { constructor( private fs: typeof import('node:fs'), private path: typeof import('node:path'), - private logger: import('../plugin-shared').ILogger, + private logger: ILogger, private globalScope?: MdxGlobalScope ) {} @@ -100,11 +102,38 @@ export class LocalizedPromptReader { this.logger.debug(`readFlatFiles: srcDir=${srcDir}, exists=${srcExists}`) this.logger.debug(`readFlatFiles: distDir=${distDir}, exists=${distExists}`) - if (!srcExists) return {prompts, errors} + if (!srcExists && !distExists) return 
{prompts, errors} - const zhExtension = options.localeExtensions.zh // Find all .cn.mdx files (Chinese source files) + const zhExtensions = this.normalizeExtensions(options.localeExtensions.zh) + const seenNames = new Set() + + const readPrompt = async (fullName: string, filePath: string): Promise => { + if (seenNames.has(fullName)) return + seenNames.add(fullName) + + try { + const localized = await this.readFlatEntry( + fullName, + srcDir, + distDir, + fullName, + options + ) + + if (localized) prompts.push(localized) + } catch (error) { + errors.push({ + path: filePath, + error: error as Error, + phase: 'read' + }) + this.logger.error(`Failed to read file: ${filePath}`, {error}) + } + } + + const scanSourceDirectory = async (currentSrcDir: string, relativePath: string = ''): Promise => { + if (!this.exists(currentSrcDir)) return - const scanDirectory = async (currentSrcDir: string, currentDistDir: string, relativePath: string = ''): Promise => { try { const entries = this.fs.readdirSync(currentSrcDir, {withFileTypes: true}) for (const entry of entries) { @@ -113,38 +142,19 @@ export class LocalizedPromptReader { : entry.name if (entry.isDirectory()) { - const subSrcDir = this.path.join(currentSrcDir, entry.name) // Recursively scan subdirectories - const subDistDir = this.path.join(currentDistDir, entry.name) - await scanDirectory(subSrcDir, subDistDir, entryRelativePath) + await scanSourceDirectory(this.path.join(currentSrcDir, entry.name), entryRelativePath) continue } - if (!entry.isFile() || !entry.name.endsWith(zhExtension)) continue + const matchedExtension = this.findMatchingExtension(entry.name, zhExtensions) + if (!entry.isFile() || matchedExtension == null) continue - const baseName = entry.name.slice(0, -zhExtension.length) // Extract name without extension (e.g., "compile.cn.mdx" -> "compile") - const srcFilePath = this.path.join(currentSrcDir, entry.name) - const fullName = relativePath // Use relative path as the name to preserve 
series/subdirectory info (e.g., "auqt/boot") + const baseName = entry.name.slice(0, -matchedExtension.length) + const fullName = relativePath ? this.path.join(relativePath, baseName) : baseName - try { - const localized = await this.readFlatEntry( - fullName, - srcDir, - distDir, - fullName, - options - ) - - if (localized) prompts.push(localized) - } catch (error) { - errors.push({ - path: srcFilePath, - error: error as Error, - phase: 'read' - }) - this.logger.error(`Failed to read file: ${entry.name}`, {error}) - } + await readPrompt(fullName, this.path.join(currentSrcDir, entry.name)) } } catch (error) { errors.push({ @@ -156,7 +166,42 @@ export class LocalizedPromptReader { } } - await scanDirectory(srcDir, distDir) + const scanDistDirectory = async (currentDistDir: string, relativePath: string = ''): Promise => { + if (!this.exists(currentDistDir)) return + + try { + const entries = this.fs.readdirSync(currentDistDir, {withFileTypes: true}) + for (const entry of entries) { + const entryRelativePath = relativePath + ? this.path.join(relativePath, entry.name) + : entry.name + + if (entry.isDirectory()) { + await scanDistDirectory(this.path.join(currentDistDir, entry.name), entryRelativePath) + continue + } + + if (!entry.isFile() || !entry.name.endsWith('.mdx')) continue + + const baseName = entry.name.slice(0, -'.mdx'.length) + const fullName = relativePath + ? 
this.path.join(relativePath, baseName) + : baseName + + await readPrompt(fullName, this.path.join(currentDistDir, entry.name)) + } + } catch (error) { + errors.push({ + path: currentDistDir, + error: error as Error, + phase: 'scan' + }) + this.logger.error(`Failed to scan directory: ${currentDistDir}`, {error}) + } + } + + if (srcExists) await scanSourceDirectory(srcDir) + if (distExists) await scanDistDirectory(distDir) return {prompts, errors} } @@ -187,8 +232,10 @@ export class LocalizedPromptReader { const {localeExtensions, entryFileName, createPrompt, kind} = options const baseFileName = entryFileName ?? name - const srcZhPath = this.path.join(srcEntryDir, `${baseFileName}${localeExtensions.zh}`) - const srcEnPath = this.path.join(srcEntryDir, `${baseFileName}${localeExtensions.en}`) + const zhExtensions = this.normalizeExtensions(localeExtensions.zh) + const enExtensions = this.normalizeExtensions(localeExtensions.en) + const srcZhPath = this.resolveLocalizedPath(srcEntryDir, baseFileName, zhExtensions) + const srcEnPath = this.resolveLocalizedPath(srcEntryDir, baseFileName, enExtensions) const distPath = this.path.join(distEntryDir, `${baseFileName}.mdx`) const distContent = await this.readDistContent(distPath, createPrompt, name) // Read both src and dist independently - no fallback logic @@ -199,8 +246,8 @@ export class LocalizedPromptReader { const hasSrcZh = zhContent != null const hasSrcEn = enContent != null - if (!hasDist && !hasSrcZh) { // If neither src nor dist exists, return null - this.logger.warn(`Missing both dist and Chinese source for: ${name}`) + if (!hasDist && !hasSrcZh) { // If neither src nor source file exists, return null + this.logger.warn(`Missing both dist and source file for: ${name}`) return null } @@ -220,7 +267,7 @@ export class LocalizedPromptReader { let children: string[] | undefined if (isDirectoryStructure) { const scanDir = hasDist ? 
distEntryDir : srcEntryDir // Scan children from dist if available, otherwise from src - children = this.scanChildren(scanDir, baseFileName, localeExtensions.zh) + children = this.scanChildren(scanDir, baseFileName, zhExtensions) } return { @@ -255,8 +302,10 @@ export class LocalizedPromptReader { ): Promise | null> { const {localeExtensions, createPrompt, kind} = options - const srcZhPath = `${baseName}${localeExtensions.zh}` - const srcEnPath = `${baseName}${localeExtensions.en}` + const zhExtensions = this.normalizeExtensions(localeExtensions.zh) + const enExtensions = this.normalizeExtensions(localeExtensions.en) + const srcZhPath = this.resolveLocalizedPath('', baseName, zhExtensions) + const srcEnPath = this.resolveLocalizedPath('', baseName, enExtensions) const distPath = this.path.join(distDir, `${name}.mdx`) const fullSrcZhPath = isSingleFile ? srcZhPath : this.path.join(srcDir, srcZhPath) @@ -270,8 +319,8 @@ export class LocalizedPromptReader { const hasSrcZh = zhContent != null const hasSrcEn = enContent != null - if (!hasDist && !hasSrcZh) { // If neither src nor dist exists, return null - this.logger.warn(`Missing both dist and Chinese source for: ${name}`) + if (!hasDist && !hasSrcZh) { // If neither src nor source file exists, return null + this.logger.warn(`Missing both dist and source file for: ${name}`) return null } @@ -309,7 +358,7 @@ export class LocalizedPromptReader { private async readLocaleContent( filePath: string, locale: Locale, - createPrompt: (content: string, locale: Locale, name: string) => T | Promise, + createPrompt: (content: string, locale: Locale, name: string, metadata?: Record) => T | Promise, name: string ): Promise | null> { if (!this.exists(filePath)) return null @@ -326,7 +375,7 @@ export class LocalizedPromptReader { const parsed = parseMarkdown(rawMdx) // Parse front matter - const prompt = await createPrompt(compileResult.content, locale, name) // Create prompt object + const prompt = await 
createPrompt(compileResult.content, locale, name, compileResult.metadata.fields) // Create prompt object const result: LocalizedContent = { content: compileResult.content, @@ -349,23 +398,38 @@ export class LocalizedPromptReader { private async readDistContent( filePath: string, - createPrompt: (content: string, locale: Locale, name: string) => T | Promise, + createPrompt: (content: string, locale: Locale, name: string, metadata?: Record) => T | Promise, name: string ): Promise | null> { if (!this.exists(filePath)) return null try { - const content = this.fs.readFileSync(filePath, 'utf8') + const rawMdx = this.fs.readFileSync(filePath, 'utf8') const stats = this.fs.statSync(filePath) + const compileResult = await mdxToMd(rawMdx, { + globalScope: this.globalScope, + extractMetadata: true, + basePath: this.path.dirname(filePath) + }) + const parsed = parseMarkdown(rawMdx) - const prompt = await createPrompt(content, 'zh', name) // Create prompt from dist content (no compilation needed) + const prompt = await createPrompt( + compileResult.content, + 'zh', + name, + compileResult.metadata.fields + ) - return { - content, + const result: LocalizedContent = { + content: compileResult.content, lastModified: stats.mtime, prompt, - filePath + filePath, + rawMdx } + + if (parsed.yamlFrontMatter != null) Object.assign(result, {frontMatter: parsed.yamlFrontMatter}) + return result } catch (error) { this.logger.warn(`Failed to read dist content: ${filePath}`, {error}) return null @@ -375,13 +439,13 @@ export class LocalizedPromptReader { private scanChildren( dir: string, entryFileName: string, - zhExtension: string + zhExtensions: readonly string[] ): string[] { const children: string[] = [] if (!this.exists(dir)) return children - const entryFullName = `${entryFileName}${zhExtension}` + const entryFullNames = new Set(zhExtensions.map(extension => `${entryFileName}${extension}`)) try { const scanDir = (currentDir: string, relativePath: string): void => { @@ -394,8 +458,11 @@ 
export class LocalizedPromptReader { : entry.name if (entry.isDirectory()) scanDir(fullPath, relativeFullPath) - else if (entry.name.endsWith(zhExtension) && entry.name !== entryFullName) { - const nameWithoutExt = entry.name.slice(0, -zhExtension.length) // Child doc: relative path without extension + else { + const matchedExtension = this.findMatchingExtension(entry.name, zhExtensions) + if (matchedExtension == null || entryFullNames.has(entry.name)) continue + + const nameWithoutExt = entry.name.slice(0, -matchedExtension.length) // Child doc: relative path without extension const relativeDir = this.path.dirname(relativeFullPath) const childPath = relativeDir === '.' ? nameWithoutExt @@ -420,6 +487,31 @@ export class LocalizedPromptReader { return false } } + + private normalizeExtensions(extension: LocalizedFileExtension): readonly string[] { + return typeof extension === 'string' + ? [extension] + : extension + } + + private findMatchingExtension(fileName: string, extensions: readonly string[]): string | undefined { + return extensions.find(extension => fileName.endsWith(extension)) + } + + private resolveLocalizedPath(dir: string, baseFileName: string, extensions: readonly string[]): string { + const defaultPath = dir === '' + ? `${baseFileName}${extensions[0]}` + : this.path.join(dir, `${baseFileName}${extensions[0]}`) + + for (const extension of extensions) { + const candidate = dir === '' + ? 
`${baseFileName}${extension}` + : this.path.join(dir, `${baseFileName}${extension}`) + if (this.exists(candidate)) return candidate + } + + return defaultPath + } } /** @@ -428,7 +520,7 @@ export class LocalizedPromptReader { export function createLocalizedPromptReader( fs: typeof import('node:fs'), path: typeof import('node:path'), - logger: import('../plugin-shared').ILogger, + logger: ILogger, globalScope?: MdxGlobalScope ): LocalizedPromptReader { return new LocalizedPromptReader(fs, path, logger, globalScope) @@ -438,4 +530,4 @@ export { type DirectoryReadResult, type LocalizedReadOptions, type ReadError -} from '../plugin-shared' +} from './types' diff --git a/cli/src/plugins/plugin-output-shared/McpConfigManager.ts b/cli/src/plugins/plugin-core/McpConfigManager.ts similarity index 98% rename from cli/src/plugins/plugin-output-shared/McpConfigManager.ts rename to cli/src/plugins/plugin-core/McpConfigManager.ts index 35d73a3c..ebf40ac3 100644 --- a/cli/src/plugins/plugin-output-shared/McpConfigManager.ts +++ b/cli/src/plugins/plugin-core/McpConfigManager.ts @@ -1,4 +1,4 @@ -import type {ILogger, McpServerConfig, SkillPrompt} from '../plugin-shared' +import type {ILogger, McpServerConfig, SkillPrompt} from './types' import * as path from 'node:path' /** diff --git a/cli/src/plugins/plugin-shared/types/RegistryTypes.ts b/cli/src/plugins/plugin-core/OutputTypes.ts similarity index 51% rename from cli/src/plugins/plugin-shared/types/RegistryTypes.ts rename to cli/src/plugins/plugin-core/OutputTypes.ts index 0054f51a..64f1c147 100644 --- a/cli/src/plugins/plugin-shared/types/RegistryTypes.ts +++ b/cli/src/plugins/plugin-core/OutputTypes.ts @@ -1,17 +1,71 @@ +import type {GlobalConfigDirectoryType} from './enums' +import type {SubAgentPrompt} from './InputTypes' +import type {AbsolutePath, RelativePath} from './PromptTypes' + +/** + * Global configuration based on user_home root directory + */ +export interface GlobalConfigDirectoryInUserHome { + readonly type: K + 
readonly directory: RelativePath +} + +/** + * Special, absolute path global memory prompt + */ +export interface GlobalConfigDirectoryInOther { + readonly type: K + readonly directory: AbsolutePath +} + +export type GlobalConfigDirectory = GlobalConfigDirectoryInUserHome | GlobalConfigDirectoryInOther + +export interface Target { + +} + +/** + * SubAgent frontmatter field mapping + * Value can be a static string or a function that extracts value from SubAgentPrompt + */ +export type SubAgentFrontMatterField = string | ((subAgent: SubAgentPrompt) => unknown) + /** - * Registry Configuration Writer Types - * - * Type definitions for registry data structures used by output plugins - * to register their outputs in external tool registry files. - * - * @see Requirements 2.1, 2.2, 2.3, 3.1, 3.2, 3.3, 3.5 + * SubAgent output configuration for declarative configuration */ +export interface SubAgentOutputConfig { + /** Output subdirectory name (relative to IDE config directory) */ + readonly subDir?: string + + /** File name format template */ + readonly fileNameTemplate?: 'prefix-agent' | 'prefix_agent' | 'agent' | string + + /** Whether to include series prefix */ + readonly includeSeriesPrefix?: boolean + + /** Series prefix separator */ + readonly seriesSeparator?: string + + /** Frontmatter configuration */ + readonly frontMatter?: { + /** Custom field mappings */ + readonly fields?: Record + /** Fields to exclude */ + readonly exclude?: string[] + } + + /** Content transformation options */ + readonly contentTransform?: { + /** Whether to transform MDX references to Markdown */ + readonly transformMdxRefs?: boolean + /** Custom content processor */ + readonly processor?: (content: string, subAgent: SubAgentPrompt) => string + } +} /** * Generic registry data structure. * All registry files must have version and lastUpdated fields. 
- * - * @see Requirements 1.8 */ export interface RegistryData { readonly version: string @@ -20,8 +74,6 @@ export interface RegistryData { /** * Result of a registry operation. - * - * @see Requirements 5.4 */ export interface RegistryOperationResult { readonly success: boolean @@ -32,8 +84,6 @@ export interface RegistryOperationResult { /** * Source information for a Kiro power. * Indicates the origin type of a registered power. - * - * @see Requirements 3.1, 3.2 */ export interface KiroPowerSource { readonly type: 'local' | 'repo' | 'registry' @@ -45,11 +95,6 @@ export interface KiroPowerSource { /** * A single power entry in the Kiro registry. * Contains metadata about an installed power. - * - * Field order matches Kiro's expected format: - * name → description → mcpServers → author → keywords → displayName → installed → installedAt → installPath → source → sourcePath - * - * @see Requirements 2.1, 2.2, 2.3, 2.4 */ export interface KiroPowerEntry { readonly name: string @@ -68,8 +113,6 @@ export interface KiroPowerEntry { /** * Repository source tracking in Kiro registry. * Tracks the source/origin of registered items. - * - * @see Requirements 3.1, 3.2, 3.3, 3.5 */ export interface KiroRepoSource { readonly name: string @@ -84,8 +127,6 @@ export interface KiroRepoSource { /** * Kiro recommended repo metadata (preserved during updates). - * - * @see Requirements 4.5, 4.6 */ export interface KiroRecommendedRepo { readonly url: string @@ -96,8 +137,6 @@ export interface KiroRecommendedRepo { /** * Complete Kiro powers registry structure. * Represents the full ~/.kiro/powers/registry.json file. 
- * - * @see Requirements 4.1, 4.2 */ export interface KiroPowersRegistry extends RegistryData { readonly powers: Record diff --git a/cli/src/plugins/plugin-shared/types/PromptTypes.ts b/cli/src/plugins/plugin-core/PromptTypes.ts similarity index 84% rename from cli/src/plugins/plugin-shared/types/PromptTypes.ts rename to cli/src/plugins/plugin-core/PromptTypes.ts index ccc4dd22..e18d333c 100644 --- a/cli/src/plugins/plugin-shared/types/PromptTypes.ts +++ b/cli/src/plugins/plugin-core/PromptTypes.ts @@ -1,8 +1,38 @@ import type {Root, RootContent} from '@truenine/md-compiler' -import type {ClaudeCodeCLISubAgentColors, CodingAgentTools, FilePathKind, NamingCaseKind, PromptKind, RuleScope} from './Enums' -import type {FileContent, Path, RelativePath, RootPath} from './FileSystemTypes' +import type {ClaudeCodeCLISubAgentColors, CodingAgentTools, FilePathKind, NamingCaseKind, PromptKind, RuleScope} from './enums' import type {GlobalConfigDirectory} from './OutputTypes' +/** Common directory representation */ +export interface Path { + readonly pathKind: K + readonly path: string + readonly getDirectoryName: () => string +} + +/** Relative path directory */ +export interface RelativePath extends Path { + readonly basePath: string + getAbsolutePath: () => string +} + +/** Absolute path directory */ +export type AbsolutePath = Path + +/** Root path directory */ +export type RootPath = Path + +export interface FileContent< + C = unknown, + FK extends FilePathKind = FilePathKind.Relative, + F extends Path = RelativePath +> { + content: C + length: number + filePathKind: FK + dir: F + charsetEncoding?: BufferEncoding +} + /** * Prompt */ diff --git a/cli/src/plugins/plugin-output-shared/registry/RegistryWriter.ts b/cli/src/plugins/plugin-core/RegistryWriter.ts similarity index 96% rename from cli/src/plugins/plugin-output-shared/registry/RegistryWriter.ts rename to cli/src/plugins/plugin-core/RegistryWriter.ts index 247cd67b..42b2f03e 100644 --- 
a/cli/src/plugins/plugin-output-shared/registry/RegistryWriter.ts +++ b/cli/src/plugins/plugin-core/RegistryWriter.ts @@ -7,14 +7,12 @@ * @see Requirements 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 7.1, 7.2 */ -import type {ILogger} from '../../plugin-shared' -import type {RegistryData, RegistryOperationResult} from '../../plugin-shared/types' +import type {ILogger, RegistryData, RegistryOperationResult} from './types' import * as fs from 'node:fs' import * as os from 'node:os' import * as path from 'node:path' - -import {createLogger} from '../../plugin-shared' +import {createLogger} from '@truenine/logger' /** * Abstract base class for registry configuration writers. diff --git a/cli/src/plugins/plugin-core/constants.ts b/cli/src/plugins/plugin-core/constants.ts new file mode 100644 index 00000000..6d42a079 --- /dev/null +++ b/cli/src/plugins/plugin-core/constants.ts @@ -0,0 +1,118 @@ +import type {UserConfigFile} from './ConfigTypes.schema' + +export const PathPlaceholders = { + USER_HOME: '~', + WORKSPACE: '$WORKSPACE' +} as const + +type DefaultUserConfig = Readonly>> +export const DEFAULT_USER_CONFIG = {} as DefaultUserConfig + +export const PLUGIN_NAMES = { + AgentsOutput: 'AgentsOutputPlugin', + GeminiCLIOutput: 'GeminiCLIOutputPlugin', + CursorOutput: 'CursorOutputPlugin', + WindsurfOutput: 'WindsurfOutputPlugin', + ClaudeCodeCLIOutput: 'ClaudeCodeCLIOutputPlugin', + KiroIDEOutput: 'KiroCLIOutputPlugin', + OpencodeCLIOutput: 'OpencodeCLIOutputPlugin', + OpenAICodexCLIOutput: 'CodexCLIOutputPlugin', + DroidCLIOutput: 'DroidCLIOutputPlugin', + WarpIDEOutput: 'WarpIDEOutputPlugin', + TraeIDEOutput: 'TraeIDEOutputPlugin', + TraeCNIDEOutput: 'TraeCNIDEOutputPlugin', + QoderIDEOutput: 'QoderIDEPluginOutputPlugin', + JetBrainsCodeStyleOutput: 'JetBrainsIDECodeStyleConfigOutputPlugin', + JetBrainsAICodexOutput: 'JetBrainsAIAssistantCodexOutputPlugin', + AgentSkillsCompactOutput: 'GenericSkillsOutputPlugin', + GitExcludeOutput: 'GitExcludeOutputPlugin', + 
ReadmeOutput: 'ReadmeMdConfigFileOutputPlugin', + VSCodeOutput: 'VisualStudioCodeIDEConfigOutputPlugin', + EditorConfigOutput: 'EditorConfigOutputPlugin', + AntigravityOutput: 'AntigravityOutputPlugin' +} as const + +export type PluginName = (typeof PLUGIN_NAMES)[keyof typeof PLUGIN_NAMES] + +/** + * Constants for output plugins. + */ +export const OutputFileNames = { + SKILL: 'SKILL.md', + CURSOR_GLOBAL_RULE: 'global.mdc', + CURSOR_PROJECT_RULE: 'always.md', + MCP_CONFIG: 'mcp.json', + CLAUDE_MEMORY: 'CLAUDE.md', + WINDSURF_GLOBAL_RULE: 'global_rules.md' +} as const + +export const OutputPrefixes = { + RULE: 'rule-', + CHILD_RULE: 'glob-' +} as const + +export const OutputSubdirectories = { + RULES: 'rules', + COMMANDS: 'commands', + SKILLS: 'skills', + AGENTS: 'agents', + CURSOR_SKILLS: 'skills-cursor' +} as const + +export const FrontMatterFields = { + ALWAYS_APPLY: 'alwaysApply', + GLOBS: 'globs', + DESCRIPTION: 'description', + NAME: 'name', + TRIGGER: 'trigger' +} as const + +export const FileExtensions = { + MD: '.md', + MDC: '.mdc', + MDX: '.mdx', + JSON: '.json' +} as const + +export const SourcePromptExtensions = { + PRIMARY: '.src.mdx' +} as const + +export const SourcePromptFileExtensions = [ + SourcePromptExtensions.PRIMARY +] as const + +export const SourceLocaleExtensions = { + zh: SourcePromptFileExtensions, + en: FileExtensions.MDX +} as const + +export function hasSourcePromptExtension(fileName: string): boolean { + return SourcePromptFileExtensions.some(extension => fileName.endsWith(extension)) +} + +export const GlobalConfigDirs = { + CURSOR: '.cursor', + CLAUDE: '.claude', + WINDSURF: '.codeium/windsurf', + WINDSURF_RULES: '.windsurf' +} as const + +export const IgnoreFiles = { + CURSOR: '.cursorignore', + WINDSURF: '.codeiumignore' +} as const + +export const PreservedSkills = { + CURSOR: new Set([ + 'create-rule', + 'create-skill', + 'create-subagent', + 'migrate-to-skills', + 'update-cursor-settings' + ]) +} as const + +export const 
ToolPresets = { + CLAUDE_CODE: 'claudeCode' +} as const diff --git a/cli/src/plugins/plugin-core/enums.ts b/cli/src/plugins/plugin-core/enums.ts new file mode 100644 index 00000000..115633f0 --- /dev/null +++ b/cli/src/plugins/plugin-core/enums.ts @@ -0,0 +1,53 @@ +export enum PluginKind { + Input = 'input', + Output = 'output' +} + +export enum PromptKind { + GlobalMemory = 'globalMemory', + ProjectRootMemory = 'projectRootMemory', + ProjectChildrenMemory = 'projectChildrenMemory', + Command = 'command', + SubAgent = 'subAgent', + Skill = 'skill', + SkillChildDoc = 'skillChildDoc', + SkillResource = 'skillResource', + SkillMcpConfig = 'skillMcpConfig', + Readme = 'readme', + Rule = 'rule' +} + +export type RuleScope = 'project' | 'global' | 'workspace' + +export enum FilePathKind { + Relative = 'relative', + Absolute = 'absolute', + Root = 'root' +} + +export enum IDEKind { + VSCode = 'vscode', + IntellijIDEA = 'intellijIdea', + Git = 'git', + EditorConfig = 'editorconfig', + Original = 'original' +} + +export enum NamingCaseKind { + CamelCase = 'camelCase', + PascalCase = 'pascalCase', + SnakeCase = 'snakeCase', + KebabCase = 'kebabCase', + UpperCase = 'upperCase', + LowerCase = 'lowerCase', + Original = 'original' +} + +export enum GlobalConfigDirectoryType { + UserHome = 'userHome', + External = 'external' +} + +export type CodingAgentTools = string + +export type ClaudeCodeCLISubAgentColors = string diff --git a/cli/src/plugins/plugin-core/filters.ts b/cli/src/plugins/plugin-core/filters.ts new file mode 100644 index 00000000..7c0566d2 --- /dev/null +++ b/cli/src/plugins/plugin-core/filters.ts @@ -0,0 +1,327 @@ +import type { + ProjectConfig, + RulePrompt, + SeriName +} from './types' +import * as fs from 'node:fs' +import {createRequire} from 'node:module' +import * as path from 'node:path' +import process from 'node:process' + +/** Core series filtering helpers. 
Delegates to the unified CLI Rust NAPI when available, falls back to pure-TS implementations otherwise. */ +function resolveEffectiveIncludeSeriesTS(topLevel?: readonly string[], typeSpecific?: readonly string[]): string[] { + if (topLevel == null && typeSpecific == null) return [] + return [...new Set([...topLevel ?? [], ...typeSpecific ?? []])] +} + +function matchesSeriesTS(seriName: string | readonly string[] | null | undefined, effectiveIncludeSeries: readonly string[]): boolean { + if (seriName == null) return true + if (effectiveIncludeSeries.length === 0) return true + if (typeof seriName === 'string') return effectiveIncludeSeries.includes(seriName) + return seriName.some(name => effectiveIncludeSeries.includes(name)) +} + +function resolveSubSeriesTS( + topLevel?: Readonly>, + typeSpecific?: Readonly> +): Record { + if (topLevel == null && typeSpecific == null) return {} + const merged: Record = {} + for (const [key, values] of Object.entries(topLevel ?? {})) merged[key] = [...values] + for (const [key, values] of Object.entries(typeSpecific ?? {})) { + merged[key] = Object.hasOwn(merged, key) ? 
[...new Set([...merged[key]!, ...values])] : [...values] + } + return merged +} + +interface SeriesFilterFns { + resolveEffectiveIncludeSeries: typeof resolveEffectiveIncludeSeriesTS + matchesSeries: typeof matchesSeriesTS + resolveSubSeries: typeof resolveSubSeriesTS +} + +function isSeriesFilterFns(candidate: unknown): candidate is SeriesFilterFns { + if (candidate == null || typeof candidate !== 'object') return false + const c = candidate as Record + return typeof c['matchesSeries'] === 'function' + && typeof c['resolveEffectiveIncludeSeries'] === 'function' + && typeof c['resolveSubSeries'] === 'function' +} + +function tryLoadNapi(): SeriesFilterFns | undefined { + const suffixMap: Record = { + 'win32-x64': 'win32-x64-msvc', + 'linux-x64': 'linux-x64-gnu', + 'linux-arm64': 'linux-arm64-gnu', + 'darwin-arm64': 'darwin-arm64', + 'darwin-x64': 'darwin-x64' + } + const suffix = suffixMap[`${process.platform}-${process.arch}`] + if (suffix == null) return void 0 + + const packageName = `@truenine/memory-sync-cli-${suffix}` + const binaryFile = `napi-memory-sync-cli.${suffix}.node` + + try { + const _require = createRequire(import.meta.url) + const candidates = [ + packageName, + `${packageName}/${binaryFile}`, + `./${binaryFile}` + ] + + for (const specifier of candidates) { + try { + const loaded = _require(specifier) as unknown + const possible = [loaded, (loaded as {default?: unknown})?.default, (loaded as {config?: unknown})?.config] + for (const candidate of possible) { + if (isSeriesFilterFns(candidate)) return candidate + } + } + catch {} + } + } + catch { + } // NAPI unavailable — pure-TS fallback will be used. + return void 0 +} + +const { + resolveEffectiveIncludeSeries, + matchesSeries, + resolveSubSeries +}: SeriesFilterFns = tryLoadNapi() ?? 
{ + resolveEffectiveIncludeSeries: resolveEffectiveIncludeSeriesTS, + matchesSeries: matchesSeriesTS, + resolveSubSeries: resolveSubSeriesTS +} + +/** + * Interface for items that can be filtered by series name + */ +export interface SeriesFilterable { + readonly seriName?: SeriName +} + +/** + * Configuration path types for project config lookup + */ +export type FilterConfigPath = 'commands' | 'skills' | 'subAgents' | 'rules' + +export function filterByProjectConfig( + items: readonly T[], + projectConfig: ProjectConfig | undefined, + configPath: FilterConfigPath +): readonly T[] { + const effectiveSeries = resolveEffectiveIncludeSeries( + projectConfig?.includeSeries, + projectConfig?.[configPath]?.includeSeries + ) + return items.filter(item => matchesSeries(item.seriName, effectiveSeries)) +} + +export function normalizeSubdirPath(subdir: string): string { + let normalized = subdir.replaceAll(/\.\/+/g, '') + normalized = normalized.replaceAll(/\/+$/g, '') + return normalized +} + +function smartConcatGlob(prefix: string, glob: string): string { + if (glob.startsWith('**/')) return `${prefix}/${glob}` + if (glob.startsWith('*')) return `${prefix}/**/${glob}` + return `${prefix}/${glob}` +} + +function extractPrefixAndBaseGlob( + glob: string, + prefixes: readonly string[] +): {prefix: string | null, baseGlob: string} { + for (const prefix of prefixes) { + const normalizedPrefix = prefix.replaceAll(/\/+$/g, '') + const patterns = [ + {prefix: normalizedPrefix, pattern: `${normalizedPrefix}/`}, + {prefix: normalizedPrefix, pattern: `${normalizedPrefix}\\`} + ] + for (const {prefix: p, pattern} of patterns) { + if (glob.startsWith(pattern)) return {prefix: p, baseGlob: glob.slice(pattern.length)} + } + if (glob === normalizedPrefix) return {prefix: normalizedPrefix, baseGlob: '**/*'} + } + return {prefix: null, baseGlob: glob} +} + +export function applySubSeriesGlobPrefix( + rules: readonly RulePrompt[], + projectConfig: ProjectConfig | undefined +): readonly 
RulePrompt[] { + const subSeries = resolveSubSeries(projectConfig?.subSeries, projectConfig?.rules?.subSeries) + if (Object.keys(subSeries).length === 0) return rules + + const normalizedSubSeries: Record = {} + for (const [subdir, seriNames] of Object.entries(subSeries)) { + const normalizedSubdir = normalizeSubdirPath(subdir) + normalizedSubSeries[normalizedSubdir] = seriNames + } + + const allPrefixes = Object.keys(normalizedSubSeries) + + return rules.map(rule => { + if (rule.seriName == null) return rule + + const matchedPrefixes: string[] = [] + for (const [subdir, seriNames] of Object.entries(normalizedSubSeries)) { + const matched = Array.isArray(rule.seriName) + ? rule.seriName.some(name => seriNames.includes(name)) + : seriNames.includes(rule.seriName) + if (matched) matchedPrefixes.push(subdir) + } + + if (matchedPrefixes.length === 0) return rule + + const newGlobs: string[] = [] + for (const originalGlob of rule.globs) { + const {prefix: existingPrefix, baseGlob} = extractPrefixAndBaseGlob(originalGlob, allPrefixes) + + if (existingPrefix != null) newGlobs.push(originalGlob) + + for (const prefix of matchedPrefixes) { + if (prefix === existingPrefix) continue + const newGlob = smartConcatGlob(prefix, baseGlob) + if (!newGlobs.includes(newGlob)) newGlobs.push(newGlob) + } + } + + return { + ...rule, + globs: newGlobs + } + }) +} + +/** + * Resolves the actual `.git/info` directory for a given project path. + * Handles both regular git repos (`.git` is a directory) and submodules/worktrees (`.git` is a file with `gitdir:` pointer). + * Returns `null` if no valid git info directory can be resolved. 
+ */ +export function resolveGitInfoDir(projectDir: string): string | null { + const dotGitPath = path.join(projectDir, '.git') + + if (!fs.existsSync(dotGitPath)) return null + + const stat = fs.lstatSync(dotGitPath) + + if (stat.isDirectory()) { + const infoDir = path.join(dotGitPath, 'info') + return infoDir + } + + if (stat.isFile()) { + try { + const content = fs.readFileSync(dotGitPath, 'utf8').trim() + const match = /^gitdir: (.+)$/.exec(content) + if (match?.[1] != null) { + const gitdir = path.resolve(projectDir, match[1]) + return path.join(gitdir, 'info') + } + } + catch { + } // ignore read errors + } + + return null +} + +/** + * Recursively discovers all `.git` entries (directories or files) under a given root, + * skipping common non-source directories. + * Returns absolute paths of directories containing a `.git` entry. + */ +export function findAllGitRepos(rootDir: string, maxDepth = 5): string[] { + const results: string[] = [] + const SKIP_DIRS = new Set(['node_modules', '.turbo', 'dist', 'build', 'out', '.cache']) + + function walk(dir: string, depth: number): void { + if (depth > maxDepth) return + + let entries: fs.Dirent[] + try { + const raw = fs.readdirSync(dir, {withFileTypes: true}) + if (!Array.isArray(raw)) return + entries = raw + } + catch { + return + } + + const hasGit = entries.some(e => e.name === '.git') + if (hasGit && dir !== rootDir) results.push(dir) + + for (const entry of entries) { + if (!entry.isDirectory()) continue + if (entry.name === '.git' || SKIP_DIRS.has(entry.name)) continue + walk(path.join(dir, entry.name), depth + 1) + } + } + + walk(rootDir, 0) + return results +} + +/** + * Scans `.git/modules/` directory recursively to find all submodule `info/` dirs. + * Handles nested submodules (modules within modules). + * Returns absolute paths of `info/` directories. 
+ */ +export function findGitModuleInfoDirs(dotGitDir: string): string[] { + const modulesDir = path.join(dotGitDir, 'modules') + if (!fs.existsSync(modulesDir)) return [] + + const results: string[] = [] + + function walk(dir: string): void { + let entries: fs.Dirent[] + try { + const raw = fs.readdirSync(dir, {withFileTypes: true}) + if (!Array.isArray(raw)) return + entries = raw + } + catch { + return + } + + const hasInfo = entries.some(e => e.name === 'info' && e.isDirectory()) + if (hasInfo) results.push(path.join(dir, 'info')) + + const nestedModules = entries.find(e => e.name === 'modules' && e.isDirectory()) + if (nestedModules == null) return + + let subEntries: fs.Dirent[] + try { + const raw = fs.readdirSync(path.join(dir, 'modules'), {withFileTypes: true}) + if (!Array.isArray(raw)) return + subEntries = raw + } + catch { + return + } + for (const sub of subEntries) { + if (sub.isDirectory()) walk(path.join(dir, 'modules', sub.name)) + } + } + + let topEntries: fs.Dirent[] + try { + const raw = fs.readdirSync(modulesDir, {withFileTypes: true}) + if (!Array.isArray(raw)) return results + topEntries = raw + } + catch { + return results + } + + for (const entry of topEntries) { + if (entry.isDirectory()) walk(path.join(modulesDir, entry.name)) + } + + return results +} diff --git a/cli/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts b/cli/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts new file mode 100644 index 00000000..086da0c6 --- /dev/null +++ b/cli/src/plugins/plugin-core/plugin.outputScopes.validation.test.ts @@ -0,0 +1,116 @@ +import type {ILogger} from '@truenine/logger' +import type {OutputPlugin, OutputWriteContext} from './plugin' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {FilePathKind, PluginKind} from './enums' +import { + collectAllPluginOutputs, + executeDeclarativeWriteOutputs, + validateOutputScopeOverridesForPlugins +} from 
'./plugin' + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} + } as ILogger +} + +function createMockWriteContext(pluginName: string, topicOverride: Record): OutputWriteContext { + return { + logger: createMockLogger(), + fs, + path, + glob: {} as never, + dryRun: true, + pluginOptions: { + outputScopes: { + plugins: { + [pluginName]: topicOverride + } + } + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Relative, + path: '.', + basePath: '.', + getDirectoryName: () => '.', + getAbsolutePath: () => path.resolve('.') + }, + projects: [] + } + } + } as OutputWriteContext +} + +function createMockOutputPlugin(name: string): OutputPlugin { + return { + type: PluginKind.Output, + name, + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: { + commands: { + scopes: ['global'], + singleScope: true + } + }, + async declareOutputFiles() { + return [] + }, + async convertContent() { + return '' + } + } +} + +describe('outputScopes capability validation', () => { + it('accepts valid topic override', async () => { + const plugin = createMockOutputPlugin('MockOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {commands: 'global'}) + + const result = await executeDeclarativeWriteOutputs([plugin], ctx) + expect(result.has(plugin.name)).toBe(true) + }) + + it('throws when override topic is unsupported by plugin capabilities', async () => { + const plugin = createMockOutputPlugin('MockOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {rules: 'global'}) + + await expect(executeDeclarativeWriteOutputs([plugin], ctx)) + .rejects + .toThrow('does not support topic "rules"') + }) + + it('throws when override scope is not allowed by plugin capabilities', async () => { + const plugin = createMockOutputPlugin('MockOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {commands: 'project'}) + + 
await expect(executeDeclarativeWriteOutputs([plugin], ctx)) + .rejects + .toThrow('requests unsupported scopes [project]') + }) + + it('applies the same validation in output collection path', async () => { + const plugin = createMockOutputPlugin('MockOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {rules: 'global'}) + + await expect(collectAllPluginOutputs([plugin], ctx)) + .rejects + .toThrow('does not support topic "rules"') + }) + + it('throws for multi-scope selection on single-scope topic', () => { + const plugin = createMockOutputPlugin('MockOutputPlugin') + const ctx = createMockWriteContext(plugin.name, {commands: ['global', 'project']}) + + expect(() => validateOutputScopeOverridesForPlugins([plugin], ctx.pluginOptions)) + .toThrow('is single-scope and cannot request multiple scopes') + }) +}) diff --git a/cli/src/plugins/plugin-core/plugin.ts b/cli/src/plugins/plugin-core/plugin.ts new file mode 100644 index 00000000..e4d54651 --- /dev/null +++ b/cli/src/plugins/plugin-core/plugin.ts @@ -0,0 +1,508 @@ +import type {ILogger} from '@truenine/logger' +import type {MdxGlobalScope} from '@truenine/md-compiler/globals' +import type { + AindexConfig, + CleanupProtectionOptions, + CommandSeriesOptions, + OutputScopeOptions, + OutputScopeSelection, + PluginOutputScopeTopics, + ProtectionMode +} from './ConfigTypes.schema' +import type {PluginKind} from './enums' +import type { + InputCollectedContext, + OutputCollectedContext, + Project +} from './InputTypes' +import {Buffer} from 'node:buffer' + +export type FastGlobType = typeof import('fast-glob') + +/** + * Opaque type for ScopeRegistry. + * Concrete implementation lives in plugin-input-shared. 
+ */ +export interface ScopeRegistryLike { + resolve: (expression: string) => string +} + +export interface Plugin { + readonly type: T + readonly name: string + readonly log: ILogger + readonly dependsOn?: readonly string[] +} + +export interface PluginContext { + logger: ILogger + fs: typeof import('node:fs') + path: typeof import('node:path') + glob: FastGlobType +} + +export interface InputPluginContext extends PluginContext { + readonly userConfigOptions: Required + readonly dependencyContext: Partial + readonly runtimeCommand?: 'execute' | 'dry-run' | 'clean' | 'plugins' + + readonly globalScope?: MdxGlobalScope + + readonly scopeRegistry?: ScopeRegistryLike +} + +export interface InputPlugin extends Plugin { + collect: (ctx: InputPluginContext) => Partial | Promise> +} + +/** + * Plugin that can enhance projects after all projects are collected. + * This is useful for plugins that need to add data to projects + * that were collected by other plugins. + */ +export interface ProjectEnhancerPlugin extends InputPlugin { + enhanceProjects: (ctx: InputPluginContext, projects: readonly Project[]) => Project[] +} + +/** + * Context for output plugin operations + */ +export interface OutputPluginContext extends PluginContext { + readonly collectedOutputContext: OutputCollectedContext + readonly pluginOptions?: PluginOptions +} + +/** + * Context for output cleaning operations + */ +export interface OutputCleanContext extends OutputPluginContext { + readonly dryRun?: boolean +} + +/** + * Context for output writing operations + */ +export interface OutputWriteContext extends OutputPluginContext { + readonly dryRun?: boolean + + readonly registeredPluginNames?: readonly string[] +} + +/** + * Result of a single write operation + */ +export interface WriteResult { + readonly path: string + readonly success: boolean + readonly skipped?: boolean + readonly error?: Error +} + +/** + * Collected results from write operations + */ +export interface WriteResults { + readonly 
files: readonly WriteResult[] + readonly dirs: readonly WriteResult[] +} + +/** + * Awaitable type for sync/async flexibility + */ +export type Awaitable = T | Promise + +/** + * Result of executing an input effect. + * Used for preprocessing/cleaning input sources before collection. + */ +export interface InputEffectResult { + /** Whether the effect executed successfully */ + readonly success: boolean + /** Error details if the effect failed */ + readonly error?: Error + /** Description of what the effect did (for logging) */ + readonly description?: string + /** Files that were modified/created */ + readonly modifiedFiles?: readonly string[] + /** Files that were deleted */ + readonly deletedFiles?: readonly string[] +} + +/** + * Context provided to input effect handlers. + * Contains utilities and configuration for effect execution. + */ +export interface InputEffectContext { + /** Logger instance */ + readonly logger: ILogger + /** File system module */ + readonly fs: typeof import('node:fs') + /** Path module */ + readonly path: typeof import('node:path') + /** Glob module for file matching */ + readonly glob: FastGlobType + /** Child process spawn function */ + readonly spawn: typeof import('node:child_process').spawn + /** User configuration options */ + readonly userConfigOptions: Required + /** Resolved workspace directory */ + readonly workspaceDir: string + /** Resolved aindex directory */ + readonly aindexDir: string + /** Whether running in dry-run mode */ + readonly dryRun?: boolean +} + +/** + * Handler function for input effects. + * Receives the effect context and returns an effect result. + */ +export type InputEffectHandler = (ctx: InputEffectContext) => Awaitable + +/** + * Registration entry for an input effect. 
+ */ +export interface InputEffectRegistration { + /** Descriptive name for logging */ + readonly name: string + /** The effect handler function */ + readonly handler: InputEffectHandler + /** Priority for execution order (lower = earlier, default: 0) */ + readonly priority?: number +} + +/** + * Result of resolving base paths from plugin options. + */ +export interface ResolvedBasePaths { + /** The resolved workspace directory path */ + readonly workspaceDir: string + /** The resolved aindex directory path */ + readonly aindexDir: string +} + +/** + * Represents a registered scope entry from a plugin. + */ +export interface PluginScopeRegistration { + /** The namespace name (e.g., 'myPlugin') */ + readonly namespace: string + /** Key-value pairs registered under this namespace */ + readonly values: Record +} + +/** + * Output plugin interface. + * Declarative write model only: + * - Plugins declare target files + * - Plugins convert source metadata to content + * - Core runtime performs all file system operations + */ +export interface OutputPlugin extends Plugin { + readonly declarativeOutput: true + readonly outputCapabilities: OutputPluginCapabilities + + declareOutputFiles: (ctx: OutputWriteContext) => Awaitable + + convertContent: (declaration: OutputFileDeclaration, ctx: OutputWriteContext) => Awaitable + + declareCleanupPaths?: (ctx: OutputCleanContext) => Awaitable +} + +/** + * Scope of a declared output file target. + */ +export type OutputDeclarationScope = 'project' | 'workspace' | 'global' + +/** + * Supported output scope override topics. + */ +export const OUTPUT_SCOPE_TOPICS = ['prompt', 'rules', 'commands', 'subagents', 'skills', 'mcp'] as const + +/** + * Topic key for output scope override and capability declarations. + */ +export type OutputScopeTopic = (typeof OUTPUT_SCOPE_TOPICS)[number] + +/** + * Capability declaration for one output topic. 
+ * - scopes: allowed source scopes for selection/override + * - singleScope: whether the topic resolves to a single scope by priority + */ +export interface OutputTopicCapability { + readonly scopes: readonly OutputDeclarationScope[] + readonly singleScope: boolean +} + +/** + * Per-plugin capability matrix for output topics. + */ +export type OutputPluginCapabilities = Partial> + +/** + * Declarative output file declaration. + * Output plugins only declare target paths and source metadata. + * Core runtime performs all file system write operations. + */ +export interface OutputFileDeclaration { + /** Absolute target file path */ + readonly path: string + /** Target scope classification for cleanup/routing */ + readonly scope?: OutputDeclarationScope + /** Plugin-defined source descriptor for content conversion */ + readonly source: unknown + /** Optional existing-file policy */ + readonly ifExists?: 'overwrite' | 'skip' | 'error' + /** Optional symlink target for declarative link creation */ + readonly symlinkTarget?: string + /** Optional label for logging */ + readonly label?: string +} + +/** + * Scope of declarative cleanup targets. + */ +export type OutputCleanupScope = OutputDeclarationScope | 'xdgConfig' + +/** + * Kind of cleanup target. + */ +export type OutputCleanupTargetKind = 'file' | 'directory' | 'glob' + +/** + * Declarative cleanup target. + */ +export interface OutputCleanupPathDeclaration { + /** Absolute path or glob pattern */ + readonly path: string + /** Target kind */ + readonly kind: OutputCleanupTargetKind + /** Protection mode to apply when used in protect declarations */ + readonly protectionMode?: ProtectionMode + /** Optional scope label for logging/trace */ + readonly scope?: OutputCleanupScope + /** Optional label for diagnostics */ + readonly label?: string +} + +/** + * Optional cleanup declaration set for one output plugin. 
+ */ +export interface OutputCleanupDeclarations { + /** Paths/patterns that should be cleaned */ + readonly delete?: readonly OutputCleanupPathDeclaration[] + /** Paths/patterns that must be protected from cleanup */ + readonly protect?: readonly OutputCleanupPathDeclaration[] + /** Glob ignore patterns when expanding delete/protect globs */ + readonly excludeScanGlobs?: readonly string[] +} + +function isNodeBufferLike(value: unknown): value is Buffer { + return Buffer.isBuffer(value) +} + +function normalizeScopeSelection(selection: OutputScopeSelection): readonly OutputDeclarationScope[] { + if (typeof selection === 'string') return [selection] + + const unique: OutputDeclarationScope[] = [] + for (const scope of selection) { + if (!unique.includes(scope)) unique.push(scope) + } + return unique +} + +function getPluginScopeOverrides( + pluginName: string, + pluginOptions?: PluginOptions +): PluginOutputScopeTopics | undefined { + return pluginOptions?.outputScopes?.plugins?.[pluginName] +} + +export function validateOutputPluginCapabilities(plugin: OutputPlugin): void { + for (const topic of OUTPUT_SCOPE_TOPICS) { + const capability = plugin.outputCapabilities[topic] + if (capability == null) continue + if (capability.scopes.length === 0) throw new Error(`Plugin ${plugin.name} declares empty scopes for topic "${topic}"`) + } +} + +export function validateOutputScopeOverridesForPlugin( + plugin: OutputPlugin, + pluginOptions?: PluginOptions +): void { + const overrides = getPluginScopeOverrides(plugin.name, pluginOptions) + if (overrides == null) return + + for (const topic of OUTPUT_SCOPE_TOPICS) { + const requestedSelection = overrides[topic] + if (requestedSelection == null) continue + + const capability = plugin.outputCapabilities[topic] + if (capability == null) { + throw new Error( + `Invalid outputScopes configuration: outputScopes.plugins.${plugin.name}.${topic} is set, but plugin ${plugin.name} does not support topic "${topic}".` + ) + } + + const 
requestedScopes = normalizeScopeSelection(requestedSelection) + if (capability.singleScope && requestedScopes.length > 1) { + const requested = requestedScopes.join(', ') + throw new Error( + `Invalid outputScopes configuration: outputScopes.plugins.${plugin.name}.${topic} is single-scope and cannot request multiple scopes [${requested}].` + ) + } + + const allowedScopes = new Set(capability.scopes) + const unsupportedScopes = requestedScopes.filter(scope => !allowedScopes.has(scope)) + + if (unsupportedScopes.length > 0) { + const allowed = capability.scopes.join(', ') + const requested = unsupportedScopes.join(', ') + throw new Error( + `Invalid outputScopes configuration: outputScopes.plugins.${plugin.name}.${topic} requests unsupported scopes [${requested}]. Allowed scopes: [${allowed}].` + ) + } + } +} + +export function validateOutputScopeOverridesForPlugins( + plugins: readonly OutputPlugin[], + pluginOptions?: PluginOptions +): void { + for (const plugin of plugins) { + validateOutputPluginCapabilities(plugin) + validateOutputScopeOverridesForPlugin(plugin, pluginOptions) + } +} + +/** + * Execute declarative write operations for output plugins. + * Core runtime owns file system writes; plugins only declare and convert content. 
+ */ +export async function executeDeclarativeWriteOutputs( + plugins: readonly OutputPlugin[], + ctx: OutputWriteContext +): Promise> { + const results = new Map() + + validateOutputScopeOverridesForPlugins(plugins, ctx.pluginOptions) + + for (const plugin of plugins) { + const declarations = await plugin.declareOutputFiles(ctx) + const fileResults: WriteResult[] = [] + + for (const declaration of declarations) { + if (ctx.dryRun === true) { + fileResults.push({path: declaration.path, success: true, skipped: false}) + continue + } + + try { + const parentDir = ctx.path.dirname(declaration.path) + ctx.fs.mkdirSync(parentDir, {recursive: true}) + + if (declaration.ifExists === 'skip' && ctx.fs.existsSync(declaration.path)) { + fileResults.push({path: declaration.path, success: true, skipped: true}) + continue + } + + if (declaration.ifExists === 'error' && ctx.fs.existsSync(declaration.path)) throw new Error(`Refusing to overwrite existing file: ${declaration.path}`) + + if (declaration.symlinkTarget != null) { + if (ctx.fs.existsSync(declaration.path)) ctx.fs.rmSync(declaration.path, {force: true, recursive: false}) + ctx.fs.symlinkSync(declaration.symlinkTarget, declaration.path, 'file') + fileResults.push({path: declaration.path, success: true}) + continue + } + + const content = await plugin.convertContent(declaration, ctx) + isNodeBufferLike(content) + ? ctx.fs.writeFileSync(declaration.path, content) + : ctx.fs.writeFileSync(declaration.path, content, 'utf8') + fileResults.push({path: declaration.path, success: true}) + } + catch (error) { + fileResults.push({path: declaration.path, success: false, error: error as Error}) + } + } + + const pluginResult: WriteResults = {files: fileResults, dirs: []} + results.set(plugin.name, pluginResult) + } + + return results +} + +/** + * Collected outputs from all plugins. + * Used by the clean command to gather all artifacts for cleanup. 
+ */ +export interface CollectedOutputs { + readonly projectDirs: readonly string[] + readonly projectFiles: readonly string[] + readonly workspaceDirs: readonly string[] + readonly workspaceFiles: readonly string[] + readonly globalDirs: readonly string[] + readonly globalFiles: readonly string[] +} + +/** + * Collect all outputs from all registered output plugins. + * This is the main entry point for the clean command. + */ +export async function collectAllPluginOutputs( + plugins: readonly OutputPlugin[], + ctx: OutputPluginContext +): Promise { + const projectDirs: string[] = [] + const projectFiles: string[] = [] + const workspaceDirs: string[] = [] + const workspaceFiles: string[] = [] + const globalDirs: string[] = [] + const globalFiles: string[] = [] + + validateOutputScopeOverridesForPlugins(plugins, ctx.pluginOptions) + + for (const plugin of plugins) { + const declarations = await plugin.declareOutputFiles({...ctx, dryRun: true}) + for (const declaration of declarations) { + if (declaration.scope === 'global') globalFiles.push(declaration.path) + else if (declaration.scope === 'workspace') workspaceFiles.push(declaration.path) + else projectFiles.push(declaration.path) + } + } + + return { + projectDirs, + projectFiles, + workspaceDirs, + workspaceFiles, + globalDirs, + globalFiles + } +} + +/** + * Configuration to be processed by plugin.config.ts + * Interpreted by plugin system as collection context + * Path placeholder `~` resolves to the user home directory. 
+ * + * @see InputCollectedContext - Input-side collected context + * @see OutputCollectedContext - Output-side collected context + */ +export interface PluginOptions { + readonly version?: string + + readonly workspaceDir?: string + + readonly aindex?: AindexConfig + + readonly commandSeriesOptions?: CommandSeriesOptions + + readonly outputScopes?: OutputScopeOptions + + readonly cleanupProtection?: CleanupProtectionOptions + + plugins?: Plugin[] + logLevel?: 'trace' | 'debug' | 'info' | 'warn' | 'error' +} diff --git a/cli/src/plugins/plugin-core/scopePolicy.test.ts b/cli/src/plugins/plugin-core/scopePolicy.test.ts new file mode 100644 index 00000000..5ed30ff5 --- /dev/null +++ b/cli/src/plugins/plugin-core/scopePolicy.test.ts @@ -0,0 +1,50 @@ +import {describe, expect, it} from 'vitest' +import {resolveTopicScopes} from './scopePolicy' + +describe('resolveTopicScopes', () => { + it('selects highest available scope for single-scope topics', () => { + const result = resolveTopicScopes({ + defaultScopes: ['project', 'workspace', 'global'], + supportedScopes: ['project', 'workspace', 'global'], + singleScope: true, + availableScopes: ['workspace', 'global'] + }) + + expect(result).toEqual(['workspace']) + }) + + it('respects requested scope when provided', () => { + const result = resolveTopicScopes({ + requestedScopes: ['global'], + defaultScopes: ['project', 'workspace', 'global'], + supportedScopes: ['project', 'workspace', 'global'], + singleScope: true, + availableScopes: ['project', 'global'] + }) + + expect(result).toEqual(['global']) + }) + + it('returns prioritized multi-scope list for multi-scope topics', () => { + const result = resolveTopicScopes({ + requestedScopes: ['global', 'project', 'workspace'], + defaultScopes: ['project', 'workspace', 'global'], + supportedScopes: ['project', 'workspace', 'global'], + singleScope: false + }) + + expect(result).toEqual(['project', 'workspace', 'global']) + }) + + it('returns empty when requested scope is 
unsupported', () => { + const result = resolveTopicScopes({ + requestedScopes: ['workspace'], + defaultScopes: ['project'], + supportedScopes: ['global'], + singleScope: true, + availableScopes: ['workspace', 'global'] + }) + + expect(result).toEqual([]) + }) +}) diff --git a/cli/src/plugins/plugin-core/scopePolicy.ts b/cli/src/plugins/plugin-core/scopePolicy.ts new file mode 100644 index 00000000..5e28e90f --- /dev/null +++ b/cli/src/plugins/plugin-core/scopePolicy.ts @@ -0,0 +1,73 @@ +import type {OutputDeclarationScope} from './plugin' + +export const DEFAULT_SCOPE_PRIORITY: readonly OutputDeclarationScope[] = ['project', 'workspace', 'global'] as const + +export type ScopeSelectionInput = OutputDeclarationScope | readonly OutputDeclarationScope[] | undefined + +function normalizeSelection(selection: ScopeSelectionInput): OutputDeclarationScope[] { + if (selection == null) return [] + if (typeof selection === 'string') return [selection] + const unique: OutputDeclarationScope[] = [] + for (const scope of selection) { + if (!unique.includes(scope)) unique.push(scope) + } + return unique +} + +function sortByPriority( + scopes: readonly OutputDeclarationScope[], + priority: readonly OutputDeclarationScope[] +): OutputDeclarationScope[] { + const priorityIndex = new Map() + for (const [index, scope] of priority.entries()) priorityIndex.set(scope, index) + + return [...scopes].sort((a, b) => { + const ia = priorityIndex.get(a) ?? Number.MAX_SAFE_INTEGER + const ib = priorityIndex.get(b) ?? 
Number.MAX_SAFE_INTEGER
    return ia - ib
  })
}

/** Inputs for resolving the effective scope list of one output topic. */
export interface ResolveTopicScopesOptions {
  /** User-requested scope selection, if any (wins over defaults). */
  readonly requestedScopes?: ScopeSelectionInput
  /** Fallback selection when nothing was requested. */
  readonly defaultScopes: readonly OutputDeclarationScope[]
  /** Scopes the plugin is capable of producing for this topic. */
  readonly supportedScopes: readonly OutputDeclarationScope[]
  /** Whether the topic resolves to exactly one scope by priority. */
  readonly singleScope: boolean
  /** Scopes that currently have content available (single-scope topics only). */
  readonly availableScopes?: readonly OutputDeclarationScope[]
  /** Priority order override; defaults to DEFAULT_SCOPE_PRIORITY. */
  readonly priority?: readonly OutputDeclarationScope[]
}

/**
 * Resolve the effective scope list for one output topic.
 * Requested scopes win over defaults, unsupported scopes are discarded,
 * and the survivors are ordered by priority. Single-scope topics collapse
 * to the highest-priority scope, constrained to the available set when given.
 */
export function resolveTopicScopes(
  options: ResolveTopicScopesOptions
): readonly OutputDeclarationScope[] {
  const priority = options.priority ?? DEFAULT_SCOPE_PRIORITY
  const requested = normalizeSelection(options.requestedScopes)
  const fallback = normalizeSelection(options.defaultScopes)
  const supported = new Set(normalizeSelection(options.supportedScopes))

  // Explicit request wins over defaults; anything unsupported is dropped.
  const pool = (requested.length > 0 ? requested : fallback).filter(scope => supported.has(scope))
  if (pool.length === 0) return []

  const ranked = sortByPriority(pool, priority)
  if (!options.singleScope) return ranked

  if (options.availableScopes != null && options.availableScopes.length > 0) {
    const available = new Set(options.availableScopes)
    const hit = ranked.find(scope => available.has(scope))
    return hit == null ? [] : [hit]
  }

  return ranked.length > 0 ? [ranked[0]] : []
}
diff --git a/cli/src/plugins/plugin-core/types.ts b/cli/src/plugins/plugin-core/types.ts
new file mode 100644
index 00000000..36bb64dd
--- /dev/null
+++ b/cli/src/plugins/plugin-core/types.ts
@@ -0,0 +1,35 @@
export * from './AindexTypes'
export * from './ConfigTypes.schema'
export * from './enums'
export * from './ExportMetadataTypes'
export * from './InputTypes'
export * from './OutputTypes'
export * from './plugin'
export * from './PromptTypes'
export type {
  ILogger,
  LogLevel
} from '@truenine/logger'

export class
MissingDependencyError extends Error { + readonly pluginName: string + + readonly missingDependency: string + + constructor(pluginName: string, missingDependency: string) { + super(`Plugin "${pluginName}" depends on missing plugin "${missingDependency}"`) + this.name = 'MissingDependencyError' + this.pluginName = pluginName + this.missingDependency = missingDependency + } +} + +export class CircularDependencyError extends Error { + readonly cyclePath: readonly string[] + + constructor(cyclePath: readonly string[]) { + super(`Circular plugin dependency detected: ${cyclePath.join(' -> ')}`) + this.name = 'CircularDependencyError' + this.cyclePath = [...cyclePath] + } +} diff --git a/cli/src/plugins/plugin-cursor/index.ts b/cli/src/plugins/plugin-cursor.ts similarity index 100% rename from cli/src/plugins/plugin-cursor/index.ts rename to cli/src/plugins/plugin-cursor.ts diff --git a/cli/src/plugins/plugin-cursor/CursorOutputPlugin.ts b/cli/src/plugins/plugin-cursor/CursorOutputPlugin.ts deleted file mode 100644 index 64013bce..00000000 --- a/cli/src/plugins/plugin-cursor/CursorOutputPlugin.ts +++ /dev/null @@ -1,464 +0,0 @@ -import type { - CommandPrompt, - OutputPluginContext, - OutputWriteContext, - RulePrompt, - SkillPrompt, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import {Buffer} from 'node:buffer' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' -import { - AbstractOutputPlugin, - applySubSeriesGlobPrefix, - filterCommandsByProjectConfig, - filterRulesByProjectConfig, - filterSkillsByProjectConfig, - GlobalConfigDirs, - IgnoreFiles, - McpConfigManager, - OutputFileNames, - OutputPrefixes, - OutputSubdirectories, - PreservedSkills, - transformMcpConfigForCursor -} from '@truenine/plugin-output-shared' -import {FilePathKind, PLUGIN_NAMES} from '../plugin-shared' - -const GLOBAL_CONFIG_DIR = 
GlobalConfigDirs.CURSOR // Constants for local use (consider moving to constants.ts if used by multiple plugins) -const MCP_CONFIG_FILE = OutputFileNames.MCP_CONFIG -const COMMANDS_SUBDIR = OutputSubdirectories.COMMANDS -const RULES_SUBDIR = OutputSubdirectories.RULES -const GLOBAL_RULE_FILE = OutputFileNames.CURSOR_GLOBAL_RULE -const SKILLS_CURSOR_SUBDIR = OutputSubdirectories.CURSOR_SKILLS -const SKILL_FILE_NAME = OutputFileNames.SKILL -const RULE_FILE_PREFIX = OutputPrefixes.RULE -const PRESERVED_SKILLS = PreservedSkills.CURSOR - -export class CursorOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('CursorOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: '', - dependsOn: [PLUGIN_NAMES.AgentsOutput], - indexignore: IgnoreFiles.CURSOR - }) - - this.registerCleanEffect('mcp-config-cleanup', async ctx => { - const globalDir = this.getGlobalConfigDir() - const mcpConfigPath = path.join(globalDir, MCP_CONFIG_FILE) - const emptyMcpConfig = {mcpServers: {}} - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'mcpConfigCleanup', path: mcpConfigPath}) - return {success: true, description: 'Would reset mcp.json to empty shell'} - } - - try { - this.ensureDirectory(globalDir) - fs.writeFileSync(mcpConfigPath, JSON.stringify(emptyMcpConfig, null, 2)) - this.log.trace({action: 'clean', type: 'mcpConfigCleanup', path: mcpConfigPath}) - return {success: true, description: 'Reset mcp.json to empty shell'} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'clean', type: 'mcpConfigCleanup', path: mcpConfigPath, error: errMsg}) - return {success: false, error: error as Error, description: 'Failed to reset mcp.json'} - } - }) - } - - async registerGlobalOutputDirs(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const globalDir = this.getGlobalConfigDir() - const {commands, skills, rules} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - - if (commands != null && commands.length > 0) { - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - if (filteredCommands.length > 0) { - const commandsDir = this.getGlobalCommandsDir() - results.push({pathKind: FilePathKind.Relative, path: COMMANDS_SUBDIR, basePath: globalDir, getDirectoryName: () => COMMANDS_SUBDIR, getAbsolutePath: () => commandsDir}) - } - } - - if (skills != null && skills.length > 0) { - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter.name - if (this.isPreservedSkill(skillName)) continue - const skillPath = path.join(globalDir, SKILLS_CURSOR_SUBDIR, skillName) - results.push({pathKind: FilePathKind.Relative, path: path.join(SKILLS_CURSOR_SUBDIR, skillName), basePath: globalDir, getDirectoryName: () => skillName, getAbsolutePath: () => skillPath}) - } - } - - const globalRules = rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules == null || globalRules.length === 0) return results - - const globalRulesDir = path.join(globalDir, RULES_SUBDIR) - results.push({pathKind: FilePathKind.Relative, path: RULES_SUBDIR, basePath: globalDir, getDirectoryName: () => RULES_SUBDIR, getAbsolutePath: () => globalRulesDir}) - return results - } - - async registerGlobalOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const globalDir = 
this.getGlobalConfigDir() - const {skills, commands} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const filteredSkills = skills != null ? filterSkillsByProjectConfig(skills, projectConfig) : [] - const hasAnyMcpConfig = filteredSkills.some(s => s.mcpConfig != null) - - if (hasAnyMcpConfig) { - const mcpConfigPath = path.join(globalDir, MCP_CONFIG_FILE) - results.push({pathKind: FilePathKind.Relative, path: MCP_CONFIG_FILE, basePath: globalDir, getDirectoryName: () => GLOBAL_CONFIG_DIR, getAbsolutePath: () => mcpConfigPath}) - } - - if (commands != null && commands.length > 0) { - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - const commandsDir = this.getGlobalCommandsDir() - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - for (const cmd of filteredCommands) { - const fileName = this.transformCommandName(cmd, transformOptions) - const fullPath = path.join(commandsDir, fileName) - results.push({pathKind: FilePathKind.Relative, path: path.join(COMMANDS_SUBDIR, fileName), basePath: globalDir, getDirectoryName: () => COMMANDS_SUBDIR, getAbsolutePath: () => fullPath}) - } - } - - const globalRules = ctx.collectedInputContext.rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules != null && globalRules.length > 0) { - const globalRulesDir = path.join(globalDir, RULES_SUBDIR) - for (const rule of globalRules) { - const fileName = this.buildRuleFileName(rule) - const fullPath = path.join(globalRulesDir, fileName) - results.push({pathKind: FilePathKind.Relative, path: path.join(RULES_SUBDIR, fileName), basePath: globalDir, getDirectoryName: () => RULES_SUBDIR, getAbsolutePath: () => fullPath}) - } - } - - if (filteredSkills.length === 0) return results - - const skillsCursorDir = this.getSkillsCursorDir() - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter.name - if 
(this.isPreservedSkill(skillName)) continue - const skillDir = path.join(skillsCursorDir, skillName) - results.push({pathKind: FilePathKind.Relative, path: path.join(SKILLS_CURSOR_SUBDIR, skillName, SKILL_FILE_NAME), basePath: globalDir, getDirectoryName: () => skillName, getAbsolutePath: () => path.join(skillDir, SKILL_FILE_NAME)}) - - if (skill.mcpConfig != null) results.push({pathKind: FilePathKind.Relative, path: path.join(SKILLS_CURSOR_SUBDIR, skillName, MCP_CONFIG_FILE), basePath: globalDir, getDirectoryName: () => skillName, getAbsolutePath: () => path.join(skillDir, MCP_CONFIG_FILE)}) - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - const outputRelativePath = childDoc.relativePath.replace(/\.mdx$/, '.md') - results.push({pathKind: FilePathKind.Relative, path: path.join(SKILLS_CURSOR_SUBDIR, skillName, outputRelativePath), basePath: globalDir, getDirectoryName: () => skillName, getAbsolutePath: () => path.join(skillDir, outputRelativePath)}) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) results.push({pathKind: FilePathKind.Relative, path: path.join(SKILLS_CURSOR_SUBDIR, skillName, resource.relativePath), basePath: globalDir, getDirectoryName: () => skillName, getAbsolutePath: () => path.join(skillDir, resource.relativePath)}) - } - } - return results - } - - async registerProjectOutputDirs(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {workspace, globalMemory, rules} = ctx.collectedInputContext - const hasProjectRules = rules?.some(r => this.normalizeRuleScope(r) === 'project') ?? 
false - if (globalMemory == null && !hasProjectRules) return results - for (const project of workspace.projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - results.push(this.createProjectRulesDirRelativePath(projectDir)) - } - return results - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {workspace, globalMemory, rules} = ctx.collectedInputContext - if (globalMemory == null && rules == null) return results - - if (globalMemory != null) { - for (const project of workspace.projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - results.push(this.createProjectRuleFileRelativePath(projectDir, GLOBAL_RULE_FILE)) - } - } - - if (rules != null && rules.length > 0) { - for (const project of workspace.projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - const projectRules = applySubSeriesGlobPrefix(filterRulesByProjectConfig(rules.filter(r => this.normalizeRuleScope(r) === 'project'), project.projectConfig), project.projectConfig) - for (const rule of projectRules) results.push(this.createProjectRuleFileRelativePath(projectDir, this.buildRuleFileName(rule))) - } - } - - results.push(...this.registerProjectIgnoreOutputFiles(workspace.projects)) - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {workspace, skills, commands, globalMemory, rules, aiAgentIgnoreConfigFiles} = ctx.collectedInputContext - const hasSkills = (skills?.length ?? 0) > 0 - const hasFastCommands = (commands?.length ?? 0) > 0 - const hasRules = (rules?.length ?? 0) > 0 - const hasGlobalRuleOutput = globalMemory != null && workspace.projects.some(p => p.dirFromWorkspacePath != null) - const hasCursorIgnore = aiAgentIgnoreConfigFiles?.some(f => f.fileName === '.cursorignore') ?? 
false - if (hasSkills || hasFastCommands || hasGlobalRuleOutput || hasRules || hasCursorIgnore) return true - this.log.trace({action: 'skip', reason: 'noOutputs'}) - return false - } - - async writeGlobalOutputs(ctx: OutputWriteContext): Promise { - const {skills, commands, rules} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - if (skills != null && skills.length > 0) { - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - const mcpResult = await this.writeGlobalMcpConfig(ctx, filteredSkills) - if (mcpResult != null) fileResults.push(mcpResult) - const skillsCursorDir = this.getSkillsCursorDir() - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter.name - if (this.isPreservedSkill(skillName)) continue - fileResults.push(...await this.writeGlobalSkill(ctx, skillsCursorDir, skill)) - } - } - - if (commands != null && commands.length > 0) { - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - const commandsDir = this.getGlobalCommandsDir() - for (const cmd of filteredCommands) fileResults.push(await this.writeGlobalCommand(ctx, commandsDir, cmd)) - } - - const globalRules = rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules == null || globalRules.length === 0) return {files: fileResults, dirs: dirResults} - - const globalRulesDir = path.join(this.getGlobalConfigDir(), RULES_SUBDIR) - for (const rule of globalRules) fileResults.push(await this.writeRuleMdcFile(ctx, globalRulesDir, rule, this.getGlobalConfigDir())) - return {files: fileResults, dirs: dirResults} - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - const {workspace, globalMemory, rules} = ctx.collectedInputContext - if (globalMemory != null) { - const content = 
this.buildGlobalRuleContent(globalMemory.content as string) - for (const project of workspace.projects) { - if (project.dirFromWorkspacePath == null) continue - fileResults.push(await this.writeProjectGlobalRule(ctx, project, content)) - } - } - - if (rules != null && rules.length > 0) { - for (const project of workspace.projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - const projectRules = applySubSeriesGlobPrefix(filterRulesByProjectConfig(rules.filter(r => this.normalizeRuleScope(r) === 'project'), project.projectConfig), project.projectConfig) - if (projectRules.length === 0) continue - const rulesDir = path.join(projectDir.basePath, projectDir.path, GLOBAL_CONFIG_DIR, RULES_SUBDIR) - for (const rule of projectRules) fileResults.push(await this.writeRuleMdcFile(ctx, rulesDir, rule, projectDir.basePath)) - } - } - - fileResults.push(...await this.writeProjectIgnoreFiles(ctx)) - return {files: fileResults, dirs: dirResults} - } - - private createProjectRulesDirRelativePath(projectDir: RelativePath): RelativePath { - const rulesDirPath = path.join(projectDir.path, GLOBAL_CONFIG_DIR, RULES_SUBDIR) - return {pathKind: FilePathKind.Relative, path: rulesDirPath, basePath: projectDir.basePath, getDirectoryName: () => RULES_SUBDIR, getAbsolutePath: () => path.join(projectDir.basePath, rulesDirPath)} - } - - private createProjectRuleFileRelativePath(projectDir: RelativePath, fileName: string): RelativePath { - const filePath = path.join(projectDir.path, GLOBAL_CONFIG_DIR, RULES_SUBDIR, fileName) - return {pathKind: FilePathKind.Relative, path: filePath, basePath: projectDir.basePath, getDirectoryName: () => RULES_SUBDIR, getAbsolutePath: () => path.join(projectDir.basePath, filePath)} - } - - private buildGlobalRuleContent(content: string): string { - return buildMarkdownWithFrontMatter({description: 'Global prompt (synced)', alwaysApply: true}, content) - } - - private async writeProjectGlobalRule(ctx: OutputWriteContext, 
project: {dirFromWorkspacePath?: RelativePath | null}, content: string): Promise { - const projectDir = project.dirFromWorkspacePath! - const rulesDir = path.join(projectDir.basePath, projectDir.path, GLOBAL_CONFIG_DIR, RULES_SUBDIR) - const fullPath = path.join(rulesDir, GLOBAL_RULE_FILE) - const relativePath = this.createProjectRuleFileRelativePath(projectDir, GLOBAL_RULE_FILE) - - return this.writeFileWithHandling(ctx, fullPath, content, { - type: 'globalRule', - relativePath - }) - } - - private isPreservedSkill(name: string): boolean { return PRESERVED_SKILLS.has(name) } - private getSkillsCursorDir(): string { return path.join(this.getGlobalConfigDir(), SKILLS_CURSOR_SUBDIR) } - private getGlobalCommandsDir(): string { return path.join(this.getGlobalConfigDir(), COMMANDS_SUBDIR) } - - private async writeGlobalCommand(ctx: OutputWriteContext, commandsDir: string, cmd: CommandPrompt): Promise { - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - const fileName = this.transformCommandName(cmd, transformOptions) - const fullPath = path.join(commandsDir, fileName) - const globalDir = this.getGlobalConfigDir() - const relativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(COMMANDS_SUBDIR, fileName), basePath: globalDir, getDirectoryName: () => COMMANDS_SUBDIR, getAbsolutePath: () => fullPath} - const content = this.buildMarkdownContentWithRaw(cmd.content, cmd.yamlFrontMatter, cmd.rawFrontMatter) - - return this.writeFileWithHandling(ctx, fullPath, content, { - type: 'globalFastCommand', - relativePath - }) - } - - private async writeGlobalMcpConfig(ctx: OutputWriteContext, skills: readonly SkillPrompt[]): Promise { - const mcpManager = new McpConfigManager({fs, logger: this.log}) - const servers = mcpManager.collectMcpServers(skills) - - if (servers.size === 0) return null - - const transformed = mcpManager.transformMcpServers(servers, transformMcpConfigForCursor) - - const globalDir = 
this.getGlobalConfigDir() - const mcpConfigPath = path.join(globalDir, MCP_CONFIG_FILE) - - const result = mcpManager.writeCursorMcpConfig(mcpConfigPath, transformed, ctx.dryRun === true) - - return { - path: {pathKind: FilePathKind.Relative, path: MCP_CONFIG_FILE, basePath: globalDir, getDirectoryName: () => GLOBAL_CONFIG_DIR, getAbsolutePath: () => mcpConfigPath}, - success: result.success, - ...result.error != null && {error: result.error}, - ...ctx.dryRun && {skipped: true} - } - } - - private async writeGlobalSkill(ctx: OutputWriteContext, skillsDir: string, skill: SkillPrompt): Promise { - const results: WriteResult[] = [] - const skillName = skill.yamlFrontMatter.name - const skillDir = path.join(skillsDir, skillName) - const skillFilePath = path.join(skillDir, SKILL_FILE_NAME) - const globalDir = this.getGlobalConfigDir() - const skillRelativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(SKILLS_CURSOR_SUBDIR, skillName, SKILL_FILE_NAME), basePath: globalDir, getDirectoryName: () => skillName, getAbsolutePath: () => skillFilePath} - - const frontMatterData = this.buildSkillFrontMatter(skill) - const skillContent = buildMarkdownWithFrontMatter(frontMatterData, skill.content as string) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'skill', path: skillFilePath}) - results.push({path: skillRelativePath, success: true, skipped: false}) - } else { - try { - this.ensureDirectory(skillDir) - this.writeFileSync(skillFilePath, skillContent) - this.log.trace({action: 'write', type: 'skill', path: skillFilePath}) - results.push({path: skillRelativePath, success: true}) - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'skill', path: skillFilePath, error: errMsg}) - results.push({path: skillRelativePath, success: false, error: error as Error}) - } - } - - if (skill.mcpConfig != null) results.push(await this.writeSkillMcpConfig(ctx, skill, skillDir, globalDir)) - if (skill.childDocs != null) { for (const childDoc of skill.childDocs) results.push(await this.writeSkillChildDoc(ctx, childDoc, skillDir, skillName, globalDir)) } - if (skill.resources != null) { for (const resource of skill.resources) results.push(await this.writeSkillResource(ctx, resource, skillDir, skillName, globalDir)) } - return results - } - - private async writeSkillMcpConfig(ctx: OutputWriteContext, skill: SkillPrompt, skillDir: string, globalDir: string): Promise { - const skillName = skill.yamlFrontMatter.name - const mcpConfigPath = path.join(skillDir, MCP_CONFIG_FILE) - const relativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(SKILLS_CURSOR_SUBDIR, skillName, MCP_CONFIG_FILE), basePath: globalDir, getDirectoryName: () => skillName, getAbsolutePath: () => mcpConfigPath} - - const mcpManager = new McpConfigManager({fs, logger: this.log}) - const result = mcpManager.writeSkillMcpConfig(mcpConfigPath, skill.mcpConfig!.rawContent, ctx.dryRun === true) - - return { - path: relativePath, - success: result.success, - ...result.error != null && {error: result.error}, - ...ctx.dryRun && {skipped: true} - } - } - - private async writeSkillChildDoc(ctx: OutputWriteContext, childDoc: {relativePath: string, content: unknown}, skillDir: string, skillName: string, globalDir: string): Promise { - const outputRelativePath = childDoc.relativePath.replace(/\.mdx$/, '.md') - const childDocPath = path.join(skillDir, outputRelativePath) - const relativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(SKILLS_CURSOR_SUBDIR, skillName, outputRelativePath), basePath: globalDir, getDirectoryName: () => skillName, 
getAbsolutePath: () => childDocPath} - const content = childDoc.content as string - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'childDoc', path: childDocPath}) - return {path: relativePath, success: true, skipped: false} - } - try { - const parentDir = path.dirname(childDocPath) - this.ensureDirectory(parentDir) - this.writeFileSync(childDocPath, content) - this.log.trace({action: 'write', type: 'childDoc', path: childDocPath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'childDoc', path: childDocPath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - private async writeSkillResource(ctx: OutputWriteContext, resource: {relativePath: string, content: string, encoding: 'text' | 'base64'}, skillDir: string, skillName: string, globalDir: string): Promise { - const resourcePath = path.join(skillDir, resource.relativePath) - const relativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(SKILLS_CURSOR_SUBDIR, skillName, resource.relativePath), basePath: globalDir, getDirectoryName: () => skillName, getAbsolutePath: () => resourcePath} - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'resource', path: resourcePath}) - return {path: relativePath, success: true, skipped: false} - } - try { - const parentDir = path.dirname(resourcePath) - this.ensureDirectory(parentDir) - if (resource.encoding === 'base64') { - const buffer = Buffer.from(resource.content, 'base64') - this.writeFileSyncBuffer(resourcePath, buffer) - } else this.writeFileSync(resourcePath, resource.content) - this.log.trace({action: 'write', type: 'resource', path: resourcePath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'resource', path: resourcePath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - protected buildRuleMdcContent(rule: RulePrompt): string { - const fmData: Record = {alwaysApply: false, globs: rule.globs.length > 0 ? rule.globs.join(', ') : ''} - const raw = buildMarkdownWithFrontMatter(fmData, rule.content) - const lines = raw.split('\n') - const transformedLines = lines.map(line => { - const match = /^(\s*globs:\s*)(['"])(.*)\2\s*$/.exec(line) - if (match == null) return line - const prefix = match[1] ?? 'globs: ' - const value = match[3] ?? '' - if (value.trim().length === 0) return line - return `${prefix}${value}` - }) - return transformedLines.join('\n') - } - - private async writeRuleMdcFile(ctx: OutputWriteContext, rulesDir: string, rule: RulePrompt, basePath: string): Promise { - const fileName = this.buildRuleFileName(rule, RULE_FILE_PREFIX) - const fullPath = path.join(rulesDir, fileName) - const relativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(GLOBAL_CONFIG_DIR, RULES_SUBDIR, fileName), basePath, getDirectoryName: () => RULES_SUBDIR, getAbsolutePath: () => fullPath} - const content = this.buildRuleMdcContent(rule) - - return this.writeFileWithHandling(ctx, fullPath, content, { - type: 'ruleFile', - relativePath - }) - } -} diff --git a/cli/src/plugins/plugin-droid-cli/index.ts b/cli/src/plugins/plugin-droid-cli.ts similarity index 100% rename from cli/src/plugins/plugin-droid-cli/index.ts rename to cli/src/plugins/plugin-droid-cli.ts diff --git a/cli/src/plugins/plugin-droid-cli/DroidCLIOutputPlugin.ts b/cli/src/plugins/plugin-droid-cli/DroidCLIOutputPlugin.ts deleted file mode 100644 index 023bc4eb..00000000 --- a/cli/src/plugins/plugin-droid-cli/DroidCLIOutputPlugin.ts +++ /dev/null @@ -1,58 +0,0 @@ -import type { - OutputWriteContext, - SkillPrompt, - WriteResult -} from '../plugin-shared' -import * as 
path from 'node:path' -import {BaseCLIOutputPlugin} from '@truenine/plugin-output-shared' - -const GLOBAL_MEMORY_FILE = 'AGENTS.md' -const GLOBAL_CONFIG_DIR = '.factory' - -export class DroidCLIOutputPlugin extends BaseCLIOutputPlugin { - constructor() { - super('DroidCLIOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: GLOBAL_MEMORY_FILE, - supportsCommands: true, - supportsSubAgents: true, - supportsSkills: true - }) // Droid uses default subdir names - } - - protected override async writeSkill( // Override writeSkill to preserve simplified front matter logic - ctx: OutputWriteContext, - basePath: string, - skill: SkillPrompt - ): Promise { - const results: WriteResult[] = [] - const skillName = skill.yamlFrontMatter?.name ?? skill.dir.getDirectoryName() - const targetDir = path.join(basePath, this.skillsSubDir, skillName) - const fullPath = path.join(targetDir, 'SKILL.md') - - const simplifiedFrontMatter = skill.yamlFrontMatter != null // Droid-specific: Simplify front matter - ? 
{name: skill.yamlFrontMatter.name, description: skill.yamlFrontMatter.description} - : void 0 - - const content = this.buildMarkdownContent(skill.content as string, simplifiedFrontMatter) - - const mainFileResult = await this.writeFile(ctx, fullPath, content, 'skill') - results.push(mainFileResult) - - if (skill.childDocs != null) { - for (const refDoc of skill.childDocs) { - const refResults = await this.writeSkillReferenceDocument(ctx, targetDir, skillName, refDoc, basePath) - results.push(...refResults) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - const refResults = await this.writeSkillResource(ctx, targetDir, skillName, resource, basePath) - results.push(...refResults) - } - } - - return results - } -} diff --git a/cli/src/plugins/plugin-editorconfig/index.ts b/cli/src/plugins/plugin-editorconfig.ts similarity index 100% rename from cli/src/plugins/plugin-editorconfig/index.ts rename to cli/src/plugins/plugin-editorconfig.ts diff --git a/cli/src/plugins/plugin-editorconfig/EditorConfigOutputPlugin.ts b/cli/src/plugins/plugin-editorconfig/EditorConfigOutputPlugin.ts deleted file mode 100644 index 9308f33d..00000000 --- a/cli/src/plugins/plugin-editorconfig/EditorConfigOutputPlugin.ts +++ /dev/null @@ -1,79 +0,0 @@ -import type { - OutputPluginContext, - OutputWriteContext, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import {AbstractOutputPlugin} from '@truenine/plugin-output-shared' -import {FilePathKind} from '../plugin-shared' - -const EDITOR_CONFIG_FILE = '.editorconfig' - -/** - * Output plugin for writing .editorconfig files to project directories. - * Reads EditorConfig files collected by EditorConfigInputPlugin. 
- */ -export class EditorConfigOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('EditorConfigOutputPlugin') - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - const {editorConfigFiles} = ctx.collectedInputContext - - if (editorConfigFiles == null || editorConfigFiles.length === 0) return results - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - if (project.isPromptSourceProject === true) continue - - const filePath = this.joinPath(projectDir.path, EDITOR_CONFIG_FILE) - results.push({ - pathKind: FilePathKind.Relative, - path: filePath, - basePath: projectDir.basePath, - getDirectoryName: () => projectDir.getDirectoryName(), - getAbsolutePath: () => this.resolvePath(projectDir.basePath, filePath) - }) - } - - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {editorConfigFiles} = ctx.collectedInputContext - if (editorConfigFiles != null && editorConfigFiles.length > 0) return true - - this.log.debug('skipped', {reason: 'no EditorConfig files found'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - const {editorConfigFiles} = ctx.collectedInputContext - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - if (editorConfigFiles == null || editorConfigFiles.length === 0) return {files: fileResults, dirs: dirResults} - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - if (project.isPromptSourceProject === true) continue - - const projectName = project.name ?? 
'unknown' - - for (const config of editorConfigFiles) { - const fullPath = this.resolvePath(projectDir.basePath, projectDir.path, EDITOR_CONFIG_FILE) - const result = await this.writeFile(ctx, fullPath, config.content, `project:${projectName}/.editorconfig`) - fileResults.push(result) - } - } - - return {files: fileResults, dirs: dirResults} - } -} diff --git a/cli/src/plugins/plugin-gemini-cli/index.ts b/cli/src/plugins/plugin-gemini-cli.ts similarity index 100% rename from cli/src/plugins/plugin-gemini-cli/index.ts rename to cli/src/plugins/plugin-gemini-cli.ts diff --git a/cli/src/plugins/plugin-gemini-cli/GeminiCLIOutputPlugin.ts b/cli/src/plugins/plugin-gemini-cli/GeminiCLIOutputPlugin.ts deleted file mode 100644 index 7d2a6887..00000000 --- a/cli/src/plugins/plugin-gemini-cli/GeminiCLIOutputPlugin.ts +++ /dev/null @@ -1,16 +0,0 @@ -import {BaseCLIOutputPlugin} from '@truenine/plugin-output-shared' - -const PROJECT_MEMORY_FILE = 'GEMINI.md' -const GLOBAL_CONFIG_DIR = '.gemini' - -export class GeminiCLIOutputPlugin extends BaseCLIOutputPlugin { - constructor() { - super('GeminiCLIOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: PROJECT_MEMORY_FILE, - supportsCommands: false, - supportsSubAgents: false, - supportsSkills: false - }) - } -} diff --git a/cli/src/plugins/plugin-git-exclude/index.ts b/cli/src/plugins/plugin-git-exclude.ts similarity index 100% rename from cli/src/plugins/plugin-git-exclude/index.ts rename to cli/src/plugins/plugin-git-exclude.ts diff --git a/cli/src/plugins/plugin-git-exclude/GitExcludeOutputPlugin.ts b/cli/src/plugins/plugin-git-exclude/GitExcludeOutputPlugin.ts deleted file mode 100644 index 07f7038f..00000000 --- a/cli/src/plugins/plugin-git-exclude/GitExcludeOutputPlugin.ts +++ /dev/null @@ -1,274 +0,0 @@ -import type { - OutputPluginContext, - OutputWriteContext, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import * as fs from 
'node:fs' -import * as path from 'node:path' -import {AbstractOutputPlugin, findAllGitRepos, findGitModuleInfoDirs, resolveGitInfoDir} from '@truenine/plugin-output-shared' -import {FilePathKind} from '../plugin-shared' - -export class GitExcludeOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('GitExcludeOutputPlugin') - } - - async registerProjectOutputDirs(): Promise { - return [] // No directories to clean - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - if (project.isPromptSourceProject === true) continue // Skip prompt source projects - - const projectDirPath = project.dirFromWorkspacePath - const projectDir = projectDirPath.getAbsolutePath() - const {basePath} = projectDirPath - const gitRepoDirs = [projectDir, ...findAllGitRepos(projectDir)] // project root + nested submodules/repos - - for (const repoDir of gitRepoDirs) { - const gitInfoDir = resolveGitInfoDir(repoDir) - if (gitInfoDir == null) continue - - const excludeFilePath = path.join(gitInfoDir, 'exclude') - const relExcludePath = path.relative(basePath, excludeFilePath) - - results.push({ - pathKind: FilePathKind.Relative, - path: relExcludePath, - basePath, - getDirectoryName: () => path.basename(repoDir), - getAbsolutePath: () => excludeFilePath - }) - } - } - - const wsDir = ctx.collectedInputContext.workspace.directory.path // Also register .git/modules/ exclude files - const wsDotGit = path.join(wsDir, '.git') - if (fs.existsSync(wsDotGit) && fs.lstatSync(wsDotGit).isDirectory()) { - for (const moduleInfoDir of findGitModuleInfoDirs(wsDotGit)) { - const excludeFilePath = path.join(moduleInfoDir, 'exclude') - const relExcludePath = path.relative(wsDir, excludeFilePath) - - results.push({ - pathKind: FilePathKind.Relative, - path: relExcludePath, - basePath: 
wsDir, - getDirectoryName: () => path.basename(path.dirname(moduleInfoDir)), - getAbsolutePath: () => excludeFilePath - }) - } - } - - return results - } - - async registerGlobalOutputDirs(): Promise { - return [] // No global directories to clean - } - - async registerGlobalOutputFiles(): Promise { - return [] // No global files to clean - workspace exclude is handled in writeProjectOutputs - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {globalGitIgnore, shadowGitExclude} = ctx.collectedInputContext - const hasContent = (globalGitIgnore != null && globalGitIgnore.length > 0) - || (shadowGitExclude != null && shadowGitExclude.length > 0) - - if (!hasContent) { - this.log.debug({action: 'canWrite', result: false, reason: 'No gitignore or exclude content found'}) - return false - } - - const {projects} = ctx.collectedInputContext.workspace - const hasGitProjects = projects.some(project => { - if (project.dirFromWorkspacePath == null) return false - const projectDir = project.dirFromWorkspacePath.getAbsolutePath() - if (resolveGitInfoDir(projectDir) != null) return true // Check project root - return findAllGitRepos(projectDir).some(d => resolveGitInfoDir(d) != null) // Check nested repos - }) - - const workspaceDir = ctx.collectedInputContext.workspace.directory.path - const hasWorkspaceGit = resolveGitInfoDir(workspaceDir) != null - - const canWrite = hasGitProjects || hasWorkspaceGit - this.log.debug({ - action: 'canWrite', - result: canWrite, - hasGitProjects, - hasWorkspaceGit, - reason: canWrite ? 
'Found git repositories to update' : 'No git repositories found' - }) - - return canWrite - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const fileResults: WriteResult[] = [] - const {globalGitIgnore, shadowGitExclude} = ctx.collectedInputContext - - const managedContent = this.buildManagedContent(globalGitIgnore, shadowGitExclude) - - if (managedContent.length === 0) { - this.log.debug({action: 'write', message: 'No gitignore or exclude content found, skipping'}) - return {files: [], dirs: []} - } - - const {workspace} = ctx.collectedInputContext - const {projects} = workspace - const writtenPaths = new Set() // Track written paths to avoid duplicates - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - - const projectDir = project.dirFromWorkspacePath.getAbsolutePath() - const gitRepoDirs = [projectDir, ...findAllGitRepos(projectDir)] // project root + nested submodules/repos - - for (const repoDir of gitRepoDirs) { - const gitInfoDir = resolveGitInfoDir(repoDir) - if (gitInfoDir == null) continue - - const gitInfoExcludePath = path.join(gitInfoDir, 'exclude') - - if (writtenPaths.has(gitInfoExcludePath)) continue - writtenPaths.add(gitInfoExcludePath) - - const label = repoDir === projectDir - ? `project:${project.name ?? 
'unknown'}` - : `nested:${path.relative(projectDir, repoDir)}` - - this.log.trace({action: 'write', path: gitInfoExcludePath, label}) - - const result = await this.writeGitExcludeFile(ctx, gitInfoExcludePath, managedContent, label) - fileResults.push(result) - } - } - - const workspaceDir = workspace.directory.path - const workspaceGitInfoDir = resolveGitInfoDir(workspaceDir) // workspace root .git (may also be submodule host) - - if (workspaceGitInfoDir != null) { - const workspaceGitExclude = path.join(workspaceGitInfoDir, 'exclude') - - if (!writtenPaths.has(workspaceGitExclude)) { - this.log.trace({action: 'write', path: workspaceGitExclude, target: 'workspace'}) - const result = await this.writeGitExcludeFile(ctx, workspaceGitExclude, managedContent, 'workspace') - fileResults.push(result) - writtenPaths.add(workspaceGitExclude) - } - } - - const workspaceNestedRepos = findAllGitRepos(workspaceDir) // nested repos under workspace root not covered by projects - for (const repoDir of workspaceNestedRepos) { - const gitInfoDir = resolveGitInfoDir(repoDir) - if (gitInfoDir == null) continue - - const excludePath = path.join(gitInfoDir, 'exclude') - if (writtenPaths.has(excludePath)) continue - writtenPaths.add(excludePath) - - const label = `workspace-nested:${path.relative(workspaceDir, repoDir)}` - this.log.trace({action: 'write', path: excludePath, label}) - - const result = await this.writeGitExcludeFile(ctx, excludePath, managedContent, label) - fileResults.push(result) - } - - const dotGitDir = path.join(workspaceDir, '.git') // Scan .git/modules/ for submodule info dirs - if (fs.existsSync(dotGitDir) && fs.lstatSync(dotGitDir).isDirectory()) { - for (const moduleInfoDir of findGitModuleInfoDirs(dotGitDir)) { - const excludePath = path.join(moduleInfoDir, 'exclude') - if (writtenPaths.has(excludePath)) continue - writtenPaths.add(excludePath) - - const label = `git-module:${path.relative(dotGitDir, moduleInfoDir)}` - this.log.trace({action: 'write', path: 
excludePath, label}) - - const result = await this.writeGitExcludeFile(ctx, excludePath, managedContent, label) - fileResults.push(result) - } - } - - return {files: fileResults, dirs: []} - } - - private buildManagedContent(globalGitIgnore?: string, shadowGitExclude?: string): string { - const parts: string[] = [] - - if (globalGitIgnore != null && globalGitIgnore.trim().length > 0) { // Handle globalGitIgnore first - const sanitized = this.sanitizeContent(globalGitIgnore) - if (sanitized.length > 0) parts.push(sanitized) - } - - if (shadowGitExclude != null && shadowGitExclude.trim().length > 0) { // Handle shadowGitExclude - const sanitized = this.sanitizeContent(shadowGitExclude) - if (sanitized.length > 0) parts.push(sanitized) - } - - if (parts.length === 0) return '' // Return early if no content was added - return parts.join('\n') - } - - private sanitizeContent(content: string): string { - const lines = content.split(/\r?\n/) - const filtered = lines.filter(line => { - const trimmed = line.trim() - if (trimmed.length === 0) return true - return !(trimmed.startsWith('#') && !trimmed.startsWith('\\#')) - }) - return filtered.join('\n').trim() - } - - private normalizeContent(content: string): string { - const trimmed = content.trim() - if (trimmed.length === 0) return '' - return `${trimmed}\n` - } - - private async writeGitExcludeFile( - ctx: OutputWriteContext, - filePath: string, - managedContent: string, - label: string - ): Promise { - const workspaceDir = ctx.collectedInputContext.workspace.directory.path // Create RelativePath for the result - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: path.relative(workspaceDir, filePath), - basePath: workspaceDir, - getDirectoryName: () => path.basename(path.dirname(filePath)), - getAbsolutePath: () => filePath - } - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'gitExclude', path: filePath, label}) - return {path: relativePath, success: true, skipped: 
false} - } - - try { - const gitInfoDir = path.dirname(filePath) // Ensure the .git/info directory exists - if (!fs.existsSync(gitInfoDir)) { - fs.mkdirSync(gitInfoDir, {recursive: true}) - this.log.debug({action: 'mkdir', path: gitInfoDir, message: 'Created .git/info directory'}) - } - - const finalContent = this.normalizeContent(managedContent) - - fs.writeFileSync(filePath, finalContent, 'utf8') // Write the exclude file - this.log.trace({action: 'write', type: 'gitExclude', path: filePath, label}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'gitExclude', path: filePath, label, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } -} diff --git a/cli/src/plugins/plugin-input-shared/BaseDirectoryInputPlugin.ts b/cli/src/plugins/plugin-input-shared/BaseDirectoryInputPlugin.ts deleted file mode 100644 index 98a3aa20..00000000 --- a/cli/src/plugins/plugin-input-shared/BaseDirectoryInputPlugin.ts +++ /dev/null @@ -1,144 +0,0 @@ -import type {ParsedMarkdown} from '@truenine/md-compiler/markdown' -import type { - CollectedInputContext, - InputPluginContext, - PluginOptions, - ResolvedBasePaths, - YAMLFrontMatter -} from '../plugin-shared' -import {mdxToMd} from '@truenine/md-compiler' -import {MetadataValidationError} from '@truenine/md-compiler/errors' -import {parseMarkdown} from '@truenine/md-compiler/markdown' -import {AbstractInputPlugin} from './AbstractInputPlugin' - -/** - * Configuration options for BaseDirectoryInputPlugin - */ -export interface DirectoryInputPluginOptions { - readonly configKey: keyof ResolvedBasePaths | string - - readonly extension?: string -} - -/** - * Abstract base class for input plugins that scan a directory for MDX files. 
- * Provides common logic for: - * - Directoy scanning - * - File reading - * - MDX compilation - * - Metadata validation - * - Error handling - */ -export abstract class BaseDirectoryInputPlugin< - TPrompt extends { - type: string - content: string - yamlFrontMatter?: TYAML - rawFrontMatter?: string - dir: {path: string, basePath: string} - }, - TYAML extends YAMLFrontMatter -> extends AbstractInputPlugin { - protected readonly configKey: string - protected readonly extension: string - - constructor(name: string, options: DirectoryInputPluginOptions) { - super(name) - this.configKey = options.configKey - this.extension = options.extension ?? '.mdx' - } - - protected abstract validateMetadata(metadata: Record, filePath: string): { - valid: boolean - errors: readonly string[] - warnings: readonly string[] - } - - protected abstract getTargetDir(options: Required, resolvedPaths: ResolvedBasePaths): string - - protected abstract createPrompt( - entryName: string, - filePath: string, - content: string, - yamlFrontMatter: TYAML | undefined, - rawFrontMatter: string | undefined, - parsed: ParsedMarkdown, - baseDir: string, - rawContent: string - ): TPrompt - - protected abstract createResult(items: TPrompt[]): Partial - - async collect(ctx: InputPluginContext): Promise> { - const {userConfigOptions: options, logger, path, fs, globalScope} = ctx - const resolvedPaths = this.resolveBasePaths(options) - - const targetDir = this.getTargetDir(options, resolvedPaths) - const items: TPrompt[] = [] - - if (!(fs.existsSync(targetDir) && fs.statSync(targetDir).isDirectory())) return this.createResult(items) - - try { - const entries = fs.readdirSync(targetDir, {withFileTypes: true}) - for (const entry of entries) { - if (entry.isFile() && entry.name.endsWith(this.extension)) { - const filePath = path.join(targetDir, entry.name) - const rawContent = fs.readFileSync(filePath, 'utf8') - - try { - const parsed = parseMarkdown(rawContent) // Parse YAML front matter first for backward 
compatibility - - const compileResult = await mdxToMd(rawContent, { // Compile MDX with globalScope and extract metadata from exports - globalScope, - extractMetadata: true, - basePath: targetDir - }) - - const mergedFrontMatter: TYAML | undefined = parsed.yamlFrontMatter != null || Object.keys(compileResult.metadata.fields).length > 0 // Merge YAML front matter with export metadata (export takes priority) - ? { - ...parsed.yamlFrontMatter, - ...compileResult.metadata.fields - } as TYAML - : void 0 - - if (mergedFrontMatter != null) { - const validationResult = this.validateMetadata(mergedFrontMatter as Record, filePath) - - for (const warning of validationResult.warnings) logger.debug(warning) - - if (!validationResult.valid) throw new MetadataValidationError([...validationResult.errors], filePath) - } - - const {content} = compileResult - - logger.debug(`${this.name} metadata extracted`, { - file: entry.name, - source: compileResult.metadata.source, - hasYaml: parsed.yamlFrontMatter != null, - hasExport: Object.keys(compileResult.metadata.fields).length > 0 - }) - - const prompt = this.createPrompt( - entry.name, - filePath, - content, - mergedFrontMatter, - parsed.rawFrontMatter, - parsed, - targetDir, - rawContent - ) - - items.push(prompt) - } catch (e) { - logger.error(`failed to parse ${this.name} item`, {file: filePath, error: e}) - } - } - } - } catch (e) { - logger.error(`Failed to scan directory at ${targetDir}`, {error: e}) - } - - return this.createResult(items) - } -} diff --git a/cli/src/plugins/plugin-input-shared/BaseFileInputPlugin.ts b/cli/src/plugins/plugin-input-shared/BaseFileInputPlugin.ts deleted file mode 100644 index 68f1c53a..00000000 --- a/cli/src/plugins/plugin-input-shared/BaseFileInputPlugin.ts +++ /dev/null @@ -1,57 +0,0 @@ -import type { - CollectedInputContext, - InputPluginContext -} from '../plugin-shared' -import {AbstractInputPlugin} from './AbstractInputPlugin' - -/** - * Options for configuring BaseFileInputPlugin - */ 
-export interface FileInputPluginOptions { - readonly fallbackContent?: string -} - -export abstract class BaseFileInputPlugin extends AbstractInputPlugin { - protected readonly options: FileInputPluginOptions - - protected constructor(name: string, options?: FileInputPluginOptions) { - super(name) - this.options = options ?? {} - } - - protected abstract getFilePath(shadowProjectDir: string): string - - protected abstract getResultKey(): keyof CollectedInputContext - - protected transformContent(content: string): TResult { - return content as unknown as TResult - } - - collect(ctx: InputPluginContext): Partial { - const {aindexDir} = this.resolveBasePaths(ctx.userConfigOptions) - const filePath = this.getFilePath(aindexDir) - - if (!ctx.fs.existsSync(filePath)) { - if (this.options.fallbackContent != null) { - this.log.debug({action: 'collect', message: 'Using fallback content', path: filePath}) - return {[this.getResultKey()]: this.transformContent(this.options.fallbackContent)} as Partial - } - this.log.debug({action: 'collect', message: 'File not found', path: filePath}) - return {} - } - - const content = ctx.fs.readFileSync(filePath, 'utf8') - - if (content.length === 0) { - if (this.options.fallbackContent != null) { - this.log.debug({action: 'collect', message: 'File empty, using fallback', path: filePath}) - return {[this.getResultKey()]: this.transformContent(this.options.fallbackContent)} as Partial - } - this.log.debug({action: 'collect', message: 'File is empty', path: filePath}) - return {} - } - - this.log.debug({action: 'collect', message: 'Loaded file content', path: filePath, length: content.length}) - return {[this.getResultKey()]: this.transformContent(content)} as Partial - } -} diff --git a/cli/src/plugins/plugin-input-shared/index.ts b/cli/src/plugins/plugin-input-shared/index.ts deleted file mode 100644 index f19e87a8..00000000 --- a/cli/src/plugins/plugin-input-shared/index.ts +++ /dev/null @@ -1,29 +0,0 @@ -export { - AbstractInputPlugin 
-} from './AbstractInputPlugin' -export { - BaseDirectoryInputPlugin -} from './BaseDirectoryInputPlugin' -export type { - DirectoryInputPluginOptions -} from './BaseDirectoryInputPlugin' -export { - BaseFileInputPlugin -} from './BaseFileInputPlugin' -export type { - FileInputPluginOptions -} from './BaseFileInputPlugin' -export { - createLocalizedPromptReader, - LocalizedPromptReader -} from './LocalizedPromptReader' -export type { - DirectoryReadResult, - LocalizedReadOptions, - ReadError -} from './LocalizedPromptReader' -export { - GlobalScopeCollector, - ScopePriority, - ScopeRegistry -} from './scope' diff --git a/cli/src/plugins/plugin-input-shared/scope/ScopeRegistry.ts b/cli/src/plugins/plugin-input-shared/scope/ScopeRegistry.ts deleted file mode 100644 index 45e5e951..00000000 --- a/cli/src/plugins/plugin-input-shared/scope/ScopeRegistry.ts +++ /dev/null @@ -1,114 +0,0 @@ -import type {EvaluationScope} from '@truenine/md-compiler' // Manages scope registration and merging with priority-based resolution. // src/scope/ScopeRegistry.ts -import type {MdxGlobalScope} from '@truenine/md-compiler/globals' - -/** - * Represents a single scope registration - */ -export interface ScopeRegistration { - readonly namespace: string - readonly values: Record - readonly priority: number -} - -/** - * Priority levels for scope sources. - * Higher values take precedence over lower values during merge. - */ -export enum ScopePriority { - /** System default values (os, default tool) */ - SystemDefault = 0, - /** Values from configuration file (profile, custom tool) */ - UserConfig = 10, - /** Values registered by plugins */ - PluginRegistered = 20, - /** Values passed at MDX compile time */ - CompileTime = 30 -} - -/** - * Registry for managing and merging scopes from multiple sources. - * Handles priority-based resolution when the same key exists in multiple sources. 
- */ -export class ScopeRegistry { - private readonly registrations: ScopeRegistration[] = [] - private globalScope: MdxGlobalScope | null = null - - setGlobalScope(scope: MdxGlobalScope): void { - this.globalScope = scope - } - - getGlobalScope(): MdxGlobalScope | null { - return this.globalScope - } - - register( - namespace: string, - values: Record, - priority: ScopePriority = ScopePriority.PluginRegistered - ): void { - this.registrations.push({namespace, values, priority}) - } - - getRegistrations(): readonly ScopeRegistration[] { - return this.registrations - } - - merge(compileTimeScope?: EvaluationScope): EvaluationScope { - const result: EvaluationScope = {} - - if (this.globalScope != null) { // 1. First add global scope (lowest priority) - result['os'] = {...this.globalScope.os} - result['env'] = {...this.globalScope.env} - result['profile'] = {...this.globalScope.profile} - result['tool'] = {...this.globalScope.tool} - } - - const sorted = [...this.registrations].sort((a, b) => a.priority - b.priority) // 2. Sort by priority and merge registered scopes - for (const reg of sorted) result[reg.namespace] = this.deepMerge(result[reg.namespace] as Record | undefined, reg.values) - - if (compileTimeScope != null) { // 3. Finally merge compile-time scope (highest priority) - for (const [key, value] of Object.entries(compileTimeScope)) { - result[key] = typeof value === 'object' && value !== null && !Array.isArray(value) - ? this.deepMerge(result[key] as Record | undefined, value as Record) - : value - } - } - - return result - } - - private deepMerge( - target: Record | undefined, - source: Record - ): Record { - if (target == null) return {...source} - - const result = {...target} - for (const [key, value] of Object.entries(source)) { - result[key] = typeof value === 'object' - && value !== null - && !Array.isArray(value) - && typeof result[key] === 'object' - && result[key] !== null - && !Array.isArray(result[key]) - ? 
this.deepMerge(result[key] as Record, value as Record) - : value - } - return result - } - - resolve(expression: string): string { - const scope = this.merge() - return expression.replaceAll(/\$\{([^}]+)\}/g, (_, key: string) => { - const parts = key.split('.') - let value: unknown = scope - for (const part of parts) value = (value as Record)?.[part] - return value != null ? String(value) : `\${${key}}` - }) - } - - clear(): void { - this.registrations.length = 0 - this.globalScope = null - } -} diff --git a/cli/src/plugins/plugin-input-shared/scope/index.ts b/cli/src/plugins/plugin-input-shared/scope/index.ts deleted file mode 100644 index be015465..00000000 --- a/cli/src/plugins/plugin-input-shared/scope/index.ts +++ /dev/null @@ -1,14 +0,0 @@ -export { // Public API exports for the scope management module. // src/scope/index.ts - GlobalScopeCollector -} from './GlobalScopeCollector' -export type { - GlobalScopeCollectorOptions -} from './GlobalScopeCollector' - -export { - ScopePriority, - ScopeRegistry -} from './ScopeRegistry' -export type { - ScopeRegistration -} from './ScopeRegistry' diff --git a/cli/src/plugins/plugin-jetbrains-ai-codex/index.ts b/cli/src/plugins/plugin-jetbrains-ai-codex.ts similarity index 100% rename from cli/src/plugins/plugin-jetbrains-ai-codex/index.ts rename to cli/src/plugins/plugin-jetbrains-ai-codex.ts diff --git a/cli/src/plugins/plugin-jetbrains-ai-codex/JetBrainsAIAssistantCodexOutputPlugin.ts b/cli/src/plugins/plugin-jetbrains-ai-codex/JetBrainsAIAssistantCodexOutputPlugin.ts deleted file mode 100644 index e2d33f36..00000000 --- a/cli/src/plugins/plugin-jetbrains-ai-codex/JetBrainsAIAssistantCodexOutputPlugin.ts +++ /dev/null @@ -1,606 +0,0 @@ -import type { - CommandPrompt, - OutputPluginContext, - OutputWriteContext, - Project, - ProjectChildrenMemoryPrompt, - SkillPrompt, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import * as fs from 'node:fs' -import 
* as path from 'node:path' -import {getPlatformFixedDir} from '@truenine/desk-paths' -import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' -import {AbstractOutputPlugin, filterCommandsByProjectConfig, filterSkillsByProjectConfig} from '@truenine/plugin-output-shared' -import {FilePathKind, PLUGIN_NAMES} from '../plugin-shared' - -/** - * Represents the filename of the project memory file. - */ -const PROJECT_MEMORY_FILE = 'AGENTS.md' -/** - * Specifies the name of the subdirectory where prompt files are stored. - */ -const PROMPTS_SUBDIR = 'prompts' -/** - * Represents the name of the subdirectory where skill-related resources are stored. - */ -const SKILLS_SUBDIR = 'skills' -/** - * The file name that represents the skill definition file. - */ -const SKILL_FILE_NAME = 'SKILL.md' -const AIASSISTANT_DIR = '.aiassistant' -const RULES_SUBDIR = 'rules' -const ROOT_RULE_FILE = 'always.md' -const CHILD_RULE_FILE_PREFIX = 'glob-' -const RULE_APPLY_ALWAYS = '\u59CB\u7EC8' -const RULE_APPLY_GLOB = '\u6309\u6587\u4EF6\u6A21\u5F0F' -const RULE_GLOB_KEY = '\u6A21\u5F0F' -/** - * Represents the directory name used for storing JetBrains-related resources or files. - */ -const JETBRAINS_VENDOR_DIR = 'JetBrains' -/** - * Represents the directory path where the AIA files are stored. - */ -const AIA_DIR = 'aia' -/** - * Represents the directory path where the Codex-related files are stored. - */ -const CODEX_DIR = 'codex' - -/** - * An array of constant string literals representing the prefixes of JetBrains IDE directory names. - */ -const IDE_DIR_PREFIXES = [ - 'IntelliJIdea', - 'WebStorm', - 'RustRover', - 'PyCharm', - 'PyCharmCE', - 'PhpStorm', - 'GoLand', - 'CLion', - 'DataGrip', - 'RubyMine', - 'Rider', - 'DataSpell', - 'Aqua' -] as const - -/** - * Represents an output plugin specifically designed for integration with JetBrains AI Assistant Codex. 
- */ -export class JetBrainsAIAssistantCodexOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('JetBrainsAIAssistantCodexOutputPlugin', { - outputFileName: PROJECT_MEMORY_FILE, - dependsOn: [PLUGIN_NAMES.AgentsOutput], - indexignore: '.aiignore' - }) - } - - async registerProjectOutputDirs(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - results.push(this.createProjectRulesDirRelativePath(project.dirFromWorkspacePath)) - } - - return results - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - if (project.rootMemoryPrompt != null) results.push(this.createProjectRuleFileRelativePath(projectDir, ROOT_RULE_FILE)) - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - const fileName = this.buildChildRuleFileName(child) - results.push(this.createProjectRuleFileRelativePath(projectDir, fileName)) - } - } - } - - results.push(...this.registerProjectIgnoreOutputFiles(projects)) - return results - } - - async registerGlobalOutputDirs(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const codexDirs = this.resolveCodexDirs() - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - - for (const codexDir of codexDirs) { - const promptsPath = path.join(codexDir, PROMPTS_SUBDIR) - results.push({ - pathKind: FilePathKind.Relative, - path: PROMPTS_SUBDIR, - basePath: codexDir, - getDirectoryName: () => PROMPTS_SUBDIR, - getAbsolutePath: () => promptsPath - }) - - const {skills} = ctx.collectedInputContext - if (skills == null || skills.length === 0) continue - - const 
filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter?.name ?? skill.dir.getDirectoryName() - const skillPath = path.join(codexDir, SKILLS_SUBDIR, skillName) - results.push({ - pathKind: FilePathKind.Relative, - path: path.join(SKILLS_SUBDIR, skillName), - basePath: codexDir, - getDirectoryName: () => skillName, - getAbsolutePath: () => skillPath - }) - } - } - - return results - } - - async registerGlobalOutputFiles(): Promise { - const codexDirs = this.resolveCodexDirs() - return codexDirs.map(codexDir => ({ - pathKind: FilePathKind.Relative, - path: PROJECT_MEMORY_FILE, - basePath: codexDir, - getDirectoryName: () => CODEX_DIR, - getAbsolutePath: () => path.join(codexDir, PROJECT_MEMORY_FILE) - })) - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {globalMemory, commands, skills, workspace, aiAgentIgnoreConfigFiles} = ctx.collectedInputContext - const hasGlobalMemory = globalMemory != null - const hasFastCommands = (commands?.length ?? 0) > 0 - const hasSkills = (skills?.length ?? 0) > 0 - const hasProjectPrompts = workspace.projects.some( - project => project.rootMemoryPrompt != null || (project.childMemoryPrompts?.length ?? 0) > 0 - ) - const hasAiIgnore = aiAgentIgnoreConfigFiles?.some(f => f.fileName === '.aiignore') ?? 
false - - if (hasGlobalMemory || hasFastCommands || hasSkills || hasProjectPrompts || hasAiIgnore) return true - - this.log.trace({action: 'skip', reason: 'noOutputs'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - if (project.rootMemoryPrompt != null) { - const content = this.buildAlwaysRuleContent(project.rootMemoryPrompt.content as string) - const result = await this.writeProjectRuleFile(ctx, project, ROOT_RULE_FILE, content, 'projectRootRule') - fileResults.push(result) - } - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - const fileName = this.buildChildRuleFileName(child) - const content = this.buildGlobRuleContent(child) - const result = await this.writeProjectRuleFile(ctx, project, fileName, content, 'projectChildRule') - fileResults.push(result) - } - } - } - - const ignoreResults = await this.writeProjectIgnoreFiles(ctx) - fileResults.push(...ignoreResults) - - return {files: fileResults, dirs: dirResults} - } - - async writeGlobalOutputs(ctx: OutputWriteContext): Promise { - const {globalMemory, commands, skills} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - const codexDirs = this.resolveCodexDirs() - - if (codexDirs.length === 0) return {files: fileResults, dirs: dirResults} - - const filteredCommands = commands != null ? filterCommandsByProjectConfig(commands, projectConfig) : [] - const filteredSkills = skills != null ? 
filterSkillsByProjectConfig(skills, projectConfig) : [] - - for (const codexDir of codexDirs) { - if (globalMemory != null) { - const fullPath = path.join(codexDir, PROJECT_MEMORY_FILE) - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: PROJECT_MEMORY_FILE, - basePath: codexDir, - getDirectoryName: () => CODEX_DIR, - getAbsolutePath: () => fullPath - } - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'globalMemory', path: fullPath}) - fileResults.push({path: relativePath, success: true, skipped: false}) - } else { - try { - this.ensureDirectory(codexDir) - fs.writeFileSync(fullPath, globalMemory.content as string, 'utf8') - this.log.trace({action: 'write', type: 'globalMemory', path: fullPath}) - fileResults.push({path: relativePath, success: true}) - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'globalMemory', path: fullPath, error: errMsg}) - fileResults.push({path: relativePath, success: false, error: error as Error}) - } - } - } - - if (filteredCommands.length > 0) { - for (const cmd of filteredCommands) { - const cmdResults = await this.writeGlobalCommand(ctx, codexDir, cmd) - fileResults.push(...cmdResults) - } - } - - if (filteredSkills.length === 0) continue - - for (const skill of filteredSkills) { - const skillResults = await this.writeGlobalSkill(ctx, codexDir, skill) - fileResults.push(...skillResults) - } - } - - return {files: fileResults, dirs: dirResults} - } - - private resolveCodexDirs(): string[] { - const baseDir = path.join(getPlatformFixedDir(), JETBRAINS_VENDOR_DIR) - if (!this.existsSync(baseDir)) return [] - - try { - const dirents = this.readdirSync(baseDir, {withFileTypes: true}) - const ideDirs = dirents.filter(dirent => { - if (!dirent.isDirectory()) return false - return this.isSupportedIdeDir(dirent.name) - }) - return ideDirs.map(dirent => path.join(baseDir, dirent.name, AIA_DIR, CODEX_DIR)) - 
} - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.warn({action: 'scan', type: 'jetbrains', path: baseDir, error: errMsg}) - return [] - } - } - - private createProjectRulesDirRelativePath(projectDir: RelativePath): RelativePath { - const rulesDirPath = path.join(projectDir.path, AIASSISTANT_DIR, RULES_SUBDIR) - return { - pathKind: FilePathKind.Relative, - path: rulesDirPath, - basePath: projectDir.basePath, - getDirectoryName: () => RULES_SUBDIR, - getAbsolutePath: () => path.join(projectDir.basePath, rulesDirPath) - } - } - - private createProjectRuleFileRelativePath(projectDir: RelativePath, fileName: string): RelativePath { - const filePath = path.join(projectDir.path, AIASSISTANT_DIR, RULES_SUBDIR, fileName) - return { - pathKind: FilePathKind.Relative, - path: filePath, - basePath: projectDir.basePath, - getDirectoryName: () => RULES_SUBDIR, - getAbsolutePath: () => path.join(projectDir.basePath, filePath) - } - } - - private buildChildRuleFileName(child: ProjectChildrenMemoryPrompt): string { - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const normalizedPath = childPath - .replaceAll('\\', '/') - .replaceAll(/^\/+|\/+$/g, '') - .replaceAll('/', '-') - - const suffix = normalizedPath.length > 0 ? normalizedPath : 'root' - return `${CHILD_RULE_FILE_PREFIX}${suffix}.md` - } - - private buildChildRulePattern(child: ProjectChildrenMemoryPrompt): string { - const childPath = child.workingChildDirectoryPath?.path ?? 
child.dir.path - const normalizedPath = childPath - .replaceAll('\\', '/') - .replaceAll(/^\/+|\/+$/g, '') - - if (normalizedPath.length === 0) return '**/*' - return `${normalizedPath}/**` - } - - private buildAlwaysRuleContent(content: string): string { - const fmData: Record = { - apply: RULE_APPLY_ALWAYS - } - - return buildMarkdownWithFrontMatter(fmData, content) - } - - private buildGlobRuleContent(child: ProjectChildrenMemoryPrompt): string { - const pattern = this.buildChildRulePattern(child) - const fmData: Record = { - apply: RULE_APPLY_GLOB, - [RULE_GLOB_KEY]: pattern - } - - return buildMarkdownWithFrontMatter(fmData, child.content as string) - } - - private async writeProjectRuleFile( - ctx: OutputWriteContext, - project: Project, - fileName: string, - content: string, - label: string - ): Promise { - const projectDir = project.dirFromWorkspacePath! - const rulesDir = path.join(projectDir.basePath, projectDir.path, AIASSISTANT_DIR, RULES_SUBDIR) - const fullPath = path.join(rulesDir, fileName) - - const relativePath = this.createProjectRuleFileRelativePath(projectDir, fileName) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: label, path: fullPath}) - return {path: relativePath, success: true, skipped: false} - } - - try { - this.ensureDirectory(rulesDir) - fs.writeFileSync(fullPath, content, 'utf8') - this.log.trace({action: 'write', type: label, path: fullPath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: label, path: fullPath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - private isSupportedIdeDir(dirName: string): boolean { - return IDE_DIR_PREFIXES.some(prefix => dirName.startsWith(prefix)) - } - - private async writeGlobalCommand( - ctx: OutputWriteContext, - codexDir: string, - cmd: CommandPrompt - ): Promise { - const results: WriteResult[] = [] - const transformOptions = this.getTransformOptionsFromContext(ctx) - const fileName = this.transformCommandName(cmd, transformOptions) - const targetDir = path.join(codexDir, PROMPTS_SUBDIR) - const fullPath = path.join(targetDir, fileName) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: path.join(PROMPTS_SUBDIR, fileName), - basePath: codexDir, - getDirectoryName: () => PROMPTS_SUBDIR, - getAbsolutePath: () => fullPath - } - - const content = this.buildMarkdownContentWithRaw( - cmd.content, - cmd.yamlFrontMatter, - cmd.rawFrontMatter - ) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'globalFastCommand', path: fullPath}) - return [{path: relativePath, success: true, skipped: false}] - } - - try { - this.ensureDirectory(targetDir) - fs.writeFileSync(fullPath, content, 'utf8') - this.log.trace({action: 'write', type: 'globalFastCommand', path: fullPath}) - results.push({path: relativePath, success: true}) - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'globalFastCommand', path: fullPath, error: errMsg}) - results.push({path: relativePath, success: false, error: error as Error}) - } - - return results - } - - private async writeGlobalSkill( - ctx: OutputWriteContext, - codexDir: string, - skill: SkillPrompt - ): Promise { - const results: WriteResult[] = [] - const skillName = skill.yamlFrontMatter?.name ?? 
skill.dir.getDirectoryName() - const targetDir = path.join(codexDir, SKILLS_SUBDIR, skillName) - const fullPath = path.join(targetDir, SKILL_FILE_NAME) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: path.join(SKILLS_SUBDIR, skillName, SKILL_FILE_NAME), - basePath: codexDir, - getDirectoryName: () => skillName, - getAbsolutePath: () => fullPath - } - - const content = this.buildCodexSkillContent(skill) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'globalSkill', path: fullPath}) - return [{path: relativePath, success: true, skipped: false}] - } - - try { - this.ensureDirectory(targetDir) - fs.writeFileSync(fullPath, content, 'utf8') - this.log.trace({action: 'write', type: 'globalSkill', path: fullPath}) - results.push({path: relativePath, success: true}) - - if (skill.childDocs != null) { - for (const refDoc of skill.childDocs) { - const refResults = await this.writeSkillReferenceDocument(ctx, targetDir, skillName, refDoc, codexDir) - results.push(...refResults) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - const resourceResults = await this.writeSkillResource(ctx, targetDir, skillName, resource, codexDir) - results.push(...resourceResults) - } - } - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'globalSkill', path: fullPath, error: errMsg}) - results.push({path: relativePath, success: false, error: error as Error}) - } - - return results - } - - private buildCodexSkillContent(skill: SkillPrompt): string { - const fm = skill.yamlFrontMatter - - const name = this.normalizeSkillName(fm.name, 64) - const description = this.normalizeToSingleLine(fm.description, 1024) - - const metadata: Record = {} - - if (fm.displayName != null) metadata['short-description'] = fm.displayName - if (fm.version != null) metadata['version'] = fm.version - if (fm.author != null) metadata['author'] = fm.author - if (fm.keywords != null && fm.keywords.length > 0) metadata['keywords'] = [...fm.keywords] - - const fmData: Record = { - name, - description - } - - if (Object.keys(metadata).length > 0) fmData['metadata'] = metadata - if (fm.allowTools != null && fm.allowTools.length > 0) fmData['allowed-tools'] = fm.allowTools.join(' ') - - return buildMarkdownWithFrontMatter(fmData, skill.content as string) - } - - private normalizeSkillName(name: string, maxLength: number): string { - let normalized = name - .toLowerCase() - .replaceAll(/[^a-z0-9-]/g, '-') - .replaceAll(/-+/g, '-') - .replaceAll(/^-+|-+$/g, '') - - if (normalized.length > maxLength) normalized = normalized.slice(0, maxLength).replace(/-+$/, '') - - return normalized - } - - private normalizeToSingleLine(text: string, maxLength: number): string { - const singleLine = text.replaceAll(/[\r\n]+/g, ' ').replaceAll(/\s+/g, ' ').trim() - if (singleLine.length > maxLength) return `${singleLine.slice(0, maxLength - 3)}...` - return singleLine - } - - private async writeSkillReferenceDocument( - ctx: OutputWriteContext, - skillDir: string, - skillName: string, - refDoc: {dir: RelativePath, content: unknown}, - codexDir: string - ): Promise { - const results: WriteResult[] = [] - const fileName = refDoc.dir.path.replace(/\.mdx$/, '.md') - const fullPath = 
path.join(skillDir, fileName) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: path.join(SKILLS_SUBDIR, skillName, fileName), - basePath: codexDir, - getDirectoryName: () => skillName, - getAbsolutePath: () => fullPath - } - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'skillRefDoc', path: fullPath}) - return [{path: relativePath, success: true, skipped: false}] - } - - try { - const parentDir = path.dirname(fullPath) - this.ensureDirectory(parentDir) - fs.writeFileSync(fullPath, refDoc.content as string, 'utf8') - this.log.trace({action: 'write', type: 'skillRefDoc', path: fullPath}) - results.push({path: relativePath, success: true}) - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'skillRefDoc', path: fullPath, error: errMsg}) - results.push({path: relativePath, success: false, error: error as Error}) - } - - return results - } - - private async writeSkillResource( - ctx: OutputWriteContext, - skillDir: string, - skillName: string, - resource: {relativePath: string, content: string}, - codexDir: string - ): Promise { - const results: WriteResult[] = [] - const fullPath = path.join(skillDir, resource.relativePath) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: path.join(SKILLS_SUBDIR, skillName, resource.relativePath), - basePath: codexDir, - getDirectoryName: () => skillName, - getAbsolutePath: () => fullPath - } - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'skillResource', path: fullPath}) - return [{path: relativePath, success: true, skipped: false}] - } - - try { - const parentDir = path.dirname(fullPath) - this.ensureDirectory(parentDir) - fs.writeFileSync(fullPath, resource.content, 'utf8') - this.log.trace({action: 'write', type: 'skillResource', path: fullPath}) - results.push({path: relativePath, success: true}) - } - catch (error) { - const 
errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'skillResource', path: fullPath, error: errMsg}) - results.push({path: relativePath, success: false, error: error as Error}) - } - - return results - } -} diff --git a/cli/src/plugins/plugin-jetbrains-codestyle/index.ts b/cli/src/plugins/plugin-jetbrains-codestyle.ts similarity index 100% rename from cli/src/plugins/plugin-jetbrains-codestyle/index.ts rename to cli/src/plugins/plugin-jetbrains-codestyle.ts diff --git a/cli/src/plugins/plugin-jetbrains-codestyle/JetBrainsIDECodeStyleConfigOutputPlugin.ts b/cli/src/plugins/plugin-jetbrains-codestyle/JetBrainsIDECodeStyleConfigOutputPlugin.ts deleted file mode 100644 index b9dcfc8c..00000000 --- a/cli/src/plugins/plugin-jetbrains-codestyle/JetBrainsIDECodeStyleConfigOutputPlugin.ts +++ /dev/null @@ -1,144 +0,0 @@ -import type { - OutputPluginContext, - OutputWriteContext, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import {AbstractOutputPlugin} from '@truenine/plugin-output-shared' -import {FilePathKind, IDEKind} from '../plugin-shared' - -const IDEA_DIR = '.idea' -const CODE_STYLES_DIR = 'codeStyles' - -/** - * Default JetBrains IDE config files that this plugin manages. - * These are the relative paths within each project directory. 
- */ -const JETBRAINS_CONFIG_FILES = [ - '.editorconfig', - '.idea/codeStyles/Project.xml', - '.idea/codeStyles/codeStyleConfig.xml', - '.idea/.gitignore' -] as const - -export class JetBrainsIDECodeStyleConfigOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('JetBrainsIDECodeStyleConfigOutputPlugin') - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - const {jetbrainsConfigFiles, editorConfigFiles} = ctx.collectedInputContext - - const hasJetBrainsConfigs = (jetbrainsConfigFiles != null && jetbrainsConfigFiles.length > 0) - || (editorConfigFiles != null && editorConfigFiles.length > 0) - if (!hasJetBrainsConfigs) return results - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - if (project.isPromptSourceProject === true) continue - - for (const configFile of JETBRAINS_CONFIG_FILES) { - const filePath = this.joinPath(projectDir.path, configFile) - results.push({ - pathKind: FilePathKind.Relative, - path: filePath, - basePath: projectDir.basePath, - getDirectoryName: () => this.dirname(configFile), - getAbsolutePath: () => this.resolvePath(projectDir.basePath, filePath) - }) - } - } - - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {jetbrainsConfigFiles, editorConfigFiles} = ctx.collectedInputContext - const hasIdeaConfigs = (jetbrainsConfigFiles != null && jetbrainsConfigFiles.length > 0) - || (editorConfigFiles != null && editorConfigFiles.length > 0) - - if (hasIdeaConfigs) return true - - this.log.debug('skipped', {reason: 'no JetBrains IDE config files found'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - const {jetbrainsConfigFiles, editorConfigFiles} = ctx.collectedInputContext - const fileResults: WriteResult[] 
= [] - const dirResults: WriteResult[] = [] - - const jetbrainsConfigs = [ - ...jetbrainsConfigFiles ?? [], - ...editorConfigFiles ?? [] - ] - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - const projectName = project.name ?? 'unknown' - - for (const config of jetbrainsConfigs) { - const result = await this.writeConfigFile(ctx, projectDir, config, `project:${projectName}`) - fileResults.push(result) - } - } - - return {files: fileResults, dirs: dirResults} - } - - private async writeConfigFile( - ctx: OutputWriteContext, - projectDir: RelativePath, - config: {type: IDEKind, content: string, dir: {path: string}}, - label: string - ): Promise { - const targetRelativePath = this.getTargetRelativePath(config) - const fullPath = this.resolvePath(projectDir.basePath, projectDir.path, targetRelativePath) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: this.joinPath(projectDir.path, targetRelativePath), - basePath: projectDir.basePath, - getDirectoryName: () => this.dirname(targetRelativePath), - getAbsolutePath: () => fullPath - } - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'config', path: fullPath, label}) - return {path: relativePath, success: true, skipped: false} - } - - try { - const dir = this.dirname(fullPath) - this.ensureDirectory(dir) - this.writeFileSync(fullPath, config.content) - this.log.trace({action: 'write', type: 'config', path: fullPath, label}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'config', path: fullPath, label, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - private getTargetRelativePath(config: {type: IDEKind, dir: {path: string}}): string { - const sourcePath = config.dir.path - - if (config.type === IDEKind.EditorConfig) return '.editorconfig' - - if (config.type !== IDEKind.IntellijIDEA) return this.basename(sourcePath) - - const ideaIndex = sourcePath.indexOf(IDEA_DIR) - if (ideaIndex !== -1) return sourcePath.slice(Math.max(0, ideaIndex)) - return this.joinPath(IDEA_DIR, CODE_STYLES_DIR, this.basename(sourcePath)) - } -} diff --git a/cli/src/plugins/plugin-openai-codex-cli/index.ts b/cli/src/plugins/plugin-openai-codex-cli.ts similarity index 100% rename from cli/src/plugins/plugin-openai-codex-cli/index.ts rename to cli/src/plugins/plugin-openai-codex-cli.ts diff --git a/cli/src/plugins/plugin-openai-codex-cli/CodexCLIOutputPlugin.ts b/cli/src/plugins/plugin-openai-codex-cli/CodexCLIOutputPlugin.ts deleted file mode 100644 index 241fedbb..00000000 --- a/cli/src/plugins/plugin-openai-codex-cli/CodexCLIOutputPlugin.ts +++ /dev/null @@ -1,108 +0,0 @@ -import type { - CommandPrompt, - OutputPluginContext, - OutputWriteContext, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import * as path from 'node:path' -import {AbstractOutputPlugin, filterCommandsByProjectConfig, filterSkillsByProjectConfig} from '@truenine/plugin-output-shared' -import {PLUGIN_NAMES} from '../plugin-shared' - -const PROJECT_MEMORY_FILE = 'AGENTS.md' -const GLOBAL_CONFIG_DIR = '.codex' -const PROMPTS_SUBDIR = 'prompts' -const SKILLS_SUBDIR = 'skills' - -export class CodexCLIOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('CodexCLIOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: PROJECT_MEMORY_FILE, - dependsOn: 
[PLUGIN_NAMES.AgentsOutput] - }) - } - - async registerProjectOutputDirs(): Promise { - return [] // Codex only supports global prompts and skills - } - - async registerProjectOutputFiles(): Promise { - return [] // AGENTS.md files are handled by AgentsOutputPlugin - } - - async registerGlobalOutputDirs(ctx: OutputPluginContext): Promise { - const globalDir = this.getGlobalConfigDir() - const results: RelativePath[] = [ - this.createRelativePath(PROMPTS_SUBDIR, globalDir, () => PROMPTS_SUBDIR) - ] - - const {skills} = ctx.collectedInputContext - if (skills == null || skills.length === 0) return results - - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter?.name ?? skill.dir.getDirectoryName() - results.push(this.createRelativePath( - path.join(SKILLS_SUBDIR, skillName), - globalDir, - () => skillName - )) - } - return results - } - - async registerGlobalOutputFiles(): Promise { - const globalDir = this.getGlobalConfigDir() - return [ - this.createRelativePath(PROJECT_MEMORY_FILE, globalDir, () => GLOBAL_CONFIG_DIR) - ] - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {globalMemory, commands} = ctx.collectedInputContext - if (globalMemory != null || (commands?.length ?? 
0) > 0) return true - this.log.trace({action: 'skip', reason: 'noOutputs'}) - return false - } - - async writeProjectOutputs(): Promise { - return {files: [], dirs: []} // Handled by AgentsOutputPlugin - } - - async writeGlobalOutputs(ctx: OutputWriteContext): Promise { - const {globalMemory, commands} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const fileResults: WriteResult[] = [] - const globalDir = this.getGlobalConfigDir() - - if (globalMemory != null) { - const fullPath = path.join(globalDir, PROJECT_MEMORY_FILE) - const result = await this.writeFile(ctx, fullPath, globalMemory.content as string, 'globalMemory') - fileResults.push(result) - } - - if (commands == null || commands.length === 0) return {files: fileResults, dirs: []} - - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - for (const cmd of filteredCommands) { - const result = await this.writeGlobalCommand(ctx, globalDir, cmd) - fileResults.push(result) - } - return {files: fileResults, dirs: []} - } - - private async writeGlobalCommand( - ctx: OutputWriteContext, - globalDir: string, - cmd: CommandPrompt - ): Promise { - const transformOptions = this.getTransformOptionsFromContext(ctx) - const fileName = this.transformCommandName(cmd, transformOptions) - const fullPath = path.join(globalDir, PROMPTS_SUBDIR, fileName) - const content = this.buildMarkdownContentWithRaw(cmd.content, cmd.yamlFrontMatter, cmd.rawFrontMatter) - return this.writeFile(ctx, fullPath, content, 'globalFastCommand') - } -} diff --git a/cli/src/plugins/plugin-opencode-cli/index.ts b/cli/src/plugins/plugin-opencode-cli.ts similarity index 100% rename from cli/src/plugins/plugin-opencode-cli/index.ts rename to cli/src/plugins/plugin-opencode-cli.ts diff --git a/cli/src/plugins/plugin-opencode-cli/OpencodeCLIOutputPlugin.ts b/cli/src/plugins/plugin-opencode-cli/OpencodeCLIOutputPlugin.ts deleted file mode 100644 index 2ea7b502..00000000 --- 
a/cli/src/plugins/plugin-opencode-cli/OpencodeCLIOutputPlugin.ts +++ /dev/null @@ -1,579 +0,0 @@ -import type {CommandPrompt, OutputPluginContext, OutputWriteContext, RulePrompt, SkillPrompt, SubAgentPrompt, WriteResult, WriteResults} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import * as fs from 'node:fs' -import * as path from 'node:path' -import { - applySubSeriesGlobPrefix, - BaseCLIOutputPlugin, - filterCommandsByProjectConfig, - filterRulesByProjectConfig, - filterSkillsByProjectConfig, - filterSubAgentsByProjectConfig, - McpConfigManager, - transformMcpConfigForOpencode -} from '@truenine/plugin-output-shared' -import {FilePathKind, PLUGIN_NAMES} from '../plugin-shared' - -const GLOBAL_MEMORY_FILE = 'AGENTS.md' -const GLOBAL_CONFIG_DIR = '.config/opencode' -const OPENCODE_CONFIG_FILE = 'opencode.json' -const OPENCODE_RULES_PLUGIN_NAME = 'opencode-rules@latest' -const PROJECT_RULES_DIR = '.opencode' -const RULES_SUBDIR = 'rules' -const RULE_FILE_PREFIX = 'rule-' - -/** - * Opencode CLI output plugin. 
- * Outputs global memory, commands, agents, and skills to ~/.config/opencode/ - */ -export class OpencodeCLIOutputPlugin extends BaseCLIOutputPlugin { - constructor() { - super('OpencodeCLIOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - outputFileName: GLOBAL_MEMORY_FILE, - commandsSubDir: 'commands', - agentsSubDir: 'agents', - skillsSubDir: 'skills', - supportsCommands: true, - supportsSubAgents: true, - supportsSkills: true, - dependsOn: [PLUGIN_NAMES.AgentsOutput] - }) - - this.registerCleanEffect('mcp-config-cleanup', async ctx => { - const globalDir = this.getGlobalConfigDir() - const configPath = path.join(globalDir, OPENCODE_CONFIG_FILE) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'mcpConfigCleanup', path: configPath}) - return {success: true, description: 'Would reset opencode.json mcp to empty'} - } - - try { - if (fs.existsSync(configPath)) { - const existingContent = fs.readFileSync(configPath, 'utf8') - const existingConfig = JSON.parse(existingContent) as Record - existingConfig['mcp'] = {} - - const pluginField = existingConfig['plugin'] - if (Array.isArray(pluginField)) { - const filtered = pluginField.filter(item => item !== OPENCODE_RULES_PLUGIN_NAME) - if (filtered.length > 0) existingConfig['plugin'] = filtered - else delete existingConfig['plugin'] - } - - fs.writeFileSync(configPath, JSON.stringify(existingConfig, null, 2)) - } - this.log.trace({action: 'clean', type: 'mcpConfigCleanup', path: configPath}) - return {success: true, description: 'Reset opencode.json mcp to empty'} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'clean', type: 'mcpConfigCleanup', path: configPath, error: errMsg}) - return {success: false, error: error as Error, description: 'Failed to reset opencode.json mcp'} - } - }) - } - - override async registerGlobalOutputFiles(ctx: OutputPluginContext): Promise { - const results = await super.registerGlobalOutputFiles(ctx) - const globalDir = this.getGlobalConfigDir() - - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const filteredSkills = ctx.collectedInputContext.skills != null - ? filterSkillsByProjectConfig(ctx.collectedInputContext.skills, projectConfig) - : [] - const hasAnyMcpConfig = filteredSkills.some(s => s.mcpConfig != null) - if (hasAnyMcpConfig) { - const configPath = path.join(globalDir, OPENCODE_CONFIG_FILE) - results.push({ - pathKind: FilePathKind.Relative, - path: OPENCODE_CONFIG_FILE, - basePath: globalDir, - getDirectoryName: () => GLOBAL_CONFIG_DIR, - getAbsolutePath: () => configPath - }) - } - - const globalRules = ctx.collectedInputContext.rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules != null && globalRules.length > 0) { - const rulesDir = path.join(globalDir, RULES_SUBDIR) - for (const rule of globalRules) results.push(this.createRelativePath(this.buildRuleFileName(rule), rulesDir, () => RULES_SUBDIR)) - } - - return results.map(result => { // Normalize skill directory names in paths - const normalizedPath = result.path.replaceAll('\\', '/') - const skillsPatternWithSlash = `/${this.skillsSubDir}/` - const skillsPatternStart = `${this.skillsSubDir}/` - - if (!(normalizedPath.includes(skillsPatternWithSlash) || normalizedPath.startsWith(skillsPatternStart))) return result - - const pathParts = normalizedPath.split('/') - const skillsIndex = pathParts.indexOf(this.skillsSubDir) - if (skillsIndex < 0 || skillsIndex + 1 >= pathParts.length) return result - - const skillName = pathParts[skillsIndex + 1] - if (skillName == null) return 
result - - const normalizedSkillName = this.validateAndNormalizeSkillName(skillName) - const newPathParts = [...pathParts] - newPathParts[skillsIndex + 1] = normalizedSkillName - const newPath = newPathParts.join('/') - return { - ...result, - path: newPath, - getDirectoryName: () => normalizedSkillName, - getAbsolutePath: () => path.join(globalDir, newPath.replaceAll('/', path.sep)) - } - }) - } - - override async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - - for (const project of projects) { - if (project.rootMemoryPrompt != null && project.dirFromWorkspacePath != null) { - results.push(this.createFileRelativePath(project.dirFromWorkspacePath, this.outputFileName)) - } - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - if (child.dir != null && this.isRelativePath(child.dir)) results.push(this.createFileRelativePath(child.dir, this.outputFileName)) - } - } - - if (project.dirFromWorkspacePath == null) continue - - const {projectConfig} = project - const basePath = path.join(project.dirFromWorkspacePath.path, PROJECT_RULES_DIR) - const transformOptions = {includeSeriesPrefix: true} as const - - if (this.supportsCommands && ctx.collectedInputContext.commands != null) { - const filteredCommands = filterCommandsByProjectConfig(ctx.collectedInputContext.commands, projectConfig) - for (const cmd of filteredCommands) { - const fileName = this.transformCommandName(cmd, transformOptions) - results.push(this.createRelativePath(path.join(basePath, this.commandsSubDir, fileName), project.dirFromWorkspacePath.basePath, () => this.commandsSubDir)) - } - } - - if (this.supportsSubAgents && ctx.collectedInputContext.subAgents != null) { - const filteredSubAgents = filterSubAgentsByProjectConfig(ctx.collectedInputContext.subAgents, projectConfig) - for (const agent of filteredSubAgents) { - const fileName = 
agent.dir.path.replace(/\.mdx$/, '.md') - results.push(this.createRelativePath(path.join(basePath, this.agentsSubDir, fileName), project.dirFromWorkspacePath.basePath, () => this.agentsSubDir)) - } - } - - if (this.supportsSkills && ctx.collectedInputContext.skills != null) { - const filteredSkills = filterSkillsByProjectConfig(ctx.collectedInputContext.skills, projectConfig) - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter?.name ?? skill.dir.getDirectoryName() - const skillDir = path.join(basePath, this.skillsSubDir, skillName) - - results.push(this.createRelativePath(path.join(skillDir, 'SKILL.md'), project.dirFromWorkspacePath.basePath, () => skillName)) - - if (skill.childDocs != null) { - for (const refDoc of skill.childDocs) { - const refDocFileName = refDoc.dir.path.replace(/\.mdx$/, '.md') - results.push(this.createRelativePath(path.join(skillDir, refDocFileName), project.dirFromWorkspacePath.basePath, () => skillName)) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - results.push(this.createRelativePath(path.join(skillDir, resource.relativePath), project.dirFromWorkspacePath.basePath, () => skillName)) - } - } - } - } - - const {rules} = ctx.collectedInputContext // Add project rules - if (rules != null && rules.length > 0) { - const projectRules = applySubSeriesGlobPrefix( - filterRulesByProjectConfig(rules, project.projectConfig), - project.projectConfig - ) - for (const rule of projectRules) { - const filePath = path.join(project.dirFromWorkspacePath.path, PROJECT_RULES_DIR, RULES_SUBDIR, this.buildRuleFileName(rule)) - results.push(this.createRelativePath(filePath, project.dirFromWorkspacePath.basePath, () => RULES_SUBDIR)) - } - } - } - - return results.map(result => { - const normalizedPath = result.path.replaceAll('\\', '/') - const skillsPatternWithSlash = `/${this.skillsSubDir}/` - const skillsPatternStart = `${this.skillsSubDir}/` - - if 
(!(normalizedPath.includes(skillsPatternWithSlash) || normalizedPath.startsWith(skillsPatternStart))) return result - - const pathParts = normalizedPath.split('/') - const skillsIndex = pathParts.indexOf(this.skillsSubDir) - if (skillsIndex < 0 || skillsIndex + 1 >= pathParts.length) return result - - const skillName = pathParts[skillsIndex + 1] - if (skillName == null) return result - - const normalizedSkillName = this.validateAndNormalizeSkillName(skillName) - const newPathParts = [...pathParts] - newPathParts[skillsIndex + 1] = normalizedSkillName - const newPath = newPathParts.join('/') - return { - ...result, - path: newPath, - getDirectoryName: () => normalizedSkillName, - getAbsolutePath: () => path.join(result.basePath, newPath.replaceAll('/', path.sep)) - } - }) - } - - override async writeGlobalOutputs(ctx: OutputWriteContext): Promise { - const baseResults = await super.writeGlobalOutputs(ctx) - const files = [...baseResults.files] - - const {skills} = ctx.collectedInputContext - if (skills != null) { - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - const mcpResult = await this.writeGlobalMcpConfig(ctx, filteredSkills) - if (mcpResult != null) files.push(mcpResult) - } - - const globalRules = ctx.collectedInputContext.rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules == null || globalRules.length === 0) return {files, dirs: baseResults.dirs} - - const rulesDir = path.join(this.getGlobalConfigDir(), RULES_SUBDIR) - for (const rule of globalRules) files.push(await this.writeFile(ctx, path.join(rulesDir, this.buildRuleFileName(rule)), this.buildRuleContent(rule), 'rule')) - return {files, dirs: baseResults.dirs} - } - - private async writeGlobalMcpConfig( - ctx: OutputWriteContext, - skills: readonly SkillPrompt[] - ): Promise { - const manager = new McpConfigManager({fs, logger: this.log}) - - const servers = 
manager.collectMcpServers(skills) - if (servers.size === 0) return null - - const transformed = manager.transformMcpServers(servers, transformMcpConfigForOpencode) - const globalDir = this.getGlobalConfigDir() - const configPath = path.join(globalDir, OPENCODE_CONFIG_FILE) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: OPENCODE_CONFIG_FILE, - basePath: globalDir, - getDirectoryName: () => GLOBAL_CONFIG_DIR, - getAbsolutePath: () => configPath - } - - const existingConfig = manager.readExistingConfig(configPath) - const pluginField = existingConfig['plugin'] - const plugins: string[] = Array.isArray(pluginField) ? pluginField.map(item => String(item)) : [] - if (!plugins.includes(OPENCODE_RULES_PLUGIN_NAME)) plugins.push(OPENCODE_RULES_PLUGIN_NAME) - - const result = manager.writeOpencodeMcpConfig( - configPath, - transformed, - ctx.dryRun === true, - { - $schema: 'https://opencode.ai/config.json', - plugin: plugins - } - ) - - if (!result.success) { - if (result.error != null) return {path: relativePath, success: false, error: result.error} - return {path: relativePath, success: false} - } - - if (result.skipped === true) return {path: relativePath, success: true, skipped: true} - return {path: relativePath, success: true} - } - - protected override async writeSubAgent( - ctx: OutputWriteContext, - basePath: string, - agent: SubAgentPrompt - ): Promise { - const fileName = agent.dir.path.replace(/\.mdx$/, '.md') - const targetDir = path.join(basePath, this.agentsSubDir) - const fullPath = path.join(targetDir, fileName) - - const opencodeFrontMatter = this.buildOpencodeAgentFrontMatter(agent) - const content = this.buildMarkdownContent(agent.content, opencodeFrontMatter) - - return [await this.writeFile(ctx, fullPath, content, 'subAgent')] - } - - private buildOpencodeAgentFrontMatter(agent: SubAgentPrompt): Record { - const frontMatter: Record = {} - const source = agent.yamlFrontMatter as Record | undefined - - if 
(source?.['description'] != null) frontMatter['description'] = source['description'] - - frontMatter['mode'] = source?.['mode'] ?? 'subagent' - - if (source?.['model'] != null) frontMatter['model'] = source['model'] - if (source?.['temperature'] != null) frontMatter['temperature'] = source['temperature'] - if (source?.['maxSteps'] != null) frontMatter['maxSteps'] = source['maxSteps'] - if (source?.['hidden'] != null) frontMatter['hidden'] = source['hidden'] - - if (source?.['allowTools'] != null && Array.isArray(source['allowTools'])) { - const tools: Record = {} - for (const tool of source['allowTools']) tools[String(tool)] = true - frontMatter['tools'] = tools - } - - if (source?.['permission'] != null && typeof source['permission'] === 'object') frontMatter['permission'] = source['permission'] - - for (const [key, value] of Object.entries(source ?? {})) { - if (!['description', 'mode', 'model', 'temperature', 'maxSteps', 'hidden', 'allowTools', 'permission', 'namingCase', 'name', 'color'].includes(key)) { - frontMatter[key] = value - } - } - - return frontMatter - } - - protected override async writeCommand( - ctx: OutputWriteContext, - basePath: string, - cmd: CommandPrompt - ): Promise { - const transformOptions = this.getTransformOptionsFromContext(ctx) - const fileName = this.transformCommandName(cmd, transformOptions) - const targetDir = path.join(basePath, this.commandsSubDir) - const fullPath = path.join(targetDir, fileName) - - const opencodeFrontMatter = this.buildOpencodeCommandFrontMatter(cmd) - const content = this.buildMarkdownContent(cmd.content, opencodeFrontMatter) - - return [await this.writeFile(ctx, fullPath, content, 'command')] - } - - private buildOpencodeCommandFrontMatter(cmd: CommandPrompt): Record { - const frontMatter: Record = {} - const source = cmd.yamlFrontMatter as Record | undefined - - if (source?.['description'] != null) frontMatter['description'] = source['description'] - if (source?.['agent'] != null) frontMatter['agent'] = 
source['agent'] - if (source?.['model'] != null) frontMatter['model'] = source['model'] - - if (source?.['allowTools'] != null && Array.isArray(source['allowTools'])) { - const tools: Record = {} - for (const tool of source['allowTools']) tools[String(tool)] = true - frontMatter['tools'] = tools - } - - for (const [key, value] of Object.entries(source ?? {})) { - if (!['description', 'agent', 'model', 'allowTools', 'namingCase', 'argumentHint'].includes(key)) frontMatter[key] = value - } - - return frontMatter - } - - protected override async writeSkill( - ctx: OutputWriteContext, - basePath: string, - skill: SkillPrompt - ): Promise { - const results: WriteResult[] = [] - const skillName = this.validateAndNormalizeSkillName((skill.yamlFrontMatter?.name as string | undefined) ?? skill.dir.getDirectoryName()) - const targetDir = path.join(basePath, this.skillsSubDir, skillName) - const fullPath = path.join(targetDir, 'SKILL.md') - - const opencodeFrontMatter = this.buildOpencodeSkillFrontMatter(skill, skillName) - const content = this.buildMarkdownContent(skill.content as string, opencodeFrontMatter) - - const mainFileResult = await this.writeFile(ctx, fullPath, content, 'skill') - results.push(mainFileResult) - - if (skill.childDocs != null) { - for (const refDoc of skill.childDocs) { - const refResults = await this.writeSkillReferenceDocument(ctx, targetDir, skillName, refDoc, basePath) - results.push(...refResults) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - const refResults = await this.writeSkillResource(ctx, targetDir, skillName, resource, basePath) - results.push(...refResults) - } - } - - return results - } - - private buildOpencodeSkillFrontMatter(skill: SkillPrompt, skillName: string): Record { - const frontMatter: Record = {} - const source = skill.yamlFrontMatter as Record | undefined - - frontMatter['name'] = skillName - if (source?.['description'] != null) frontMatter['description'] = source['description'] - 
- frontMatter['license'] = source?.['license'] ?? 'MIT' - frontMatter['compatibility'] = source?.['compatibility'] ?? 'opencode' - - const metadata: Record = {} - const metadataFields = ['author', 'version', 'keywords', 'category', 'repository', 'displayName'] - - for (const field of metadataFields) { - if (source?.[field] != null) metadata[field] = source[field] - } - - const reservedFields = new Set(['name', 'description', 'license', 'compatibility', 'namingCase', 'allowTools', 'keywords', 'displayName', 'author', 'version']) - for (const [key, value] of Object.entries(source ?? {})) { - if (!reservedFields.has(key)) metadata[key] = value - } - - if (Object.keys(metadata).length > 0) frontMatter['metadata'] = metadata - - return frontMatter - } - - private validateAndNormalizeSkillName(name: string): string { - let normalized = name.toLowerCase() - normalized = normalized.replaceAll(/[^a-z0-9-]+/g, '-') - normalized = normalized.replaceAll(/-+/g, '-') - normalized = normalized.replaceAll(/^-|-$/g, '') - - if (normalized.length === 0) normalized = 'skill' - else if (normalized.length > 64) { - normalized = normalized.slice(0, 64) - normalized = normalized.replace(/-$/, '') - } - - return normalized - } - - protected override buildRuleFileName(rule: RulePrompt, prefix: string = RULE_FILE_PREFIX): string { - return `${prefix}${rule.series}-${rule.ruleName}.md` - } - - protected override buildRuleContent(rule: RulePrompt): string { - if (rule.globs.length === 0) return rule.content - return this.buildMarkdownContent(rule.content, {globs: [...rule.globs]}) - } - - override async registerGlobalOutputDirs(ctx: OutputPluginContext): Promise { - const results = await super.registerGlobalOutputDirs(ctx) - const globalRules = ctx.collectedInputContext.rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules != null && globalRules.length > 0) results.push(this.createRelativePath(RULES_SUBDIR, this.getGlobalConfigDir(), () => RULES_SUBDIR)) - return 
results - } - - override async registerProjectOutputDirs(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - - const subdirs: string[] = [] - if (this.supportsCommands) subdirs.push(this.commandsSubDir) - if (this.supportsSubAgents) subdirs.push(this.agentsSubDir) - if (this.supportsSkills) subdirs.push(this.skillsSubDir) - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - for (const subdir of subdirs) { - const dirPath = path.join(project.dirFromWorkspacePath.path, PROJECT_RULES_DIR, subdir) - results.push(this.createRelativePath(dirPath, project.dirFromWorkspacePath.basePath, () => subdir)) - } - } - - const {rules} = ctx.collectedInputContext - if (rules != null && rules.length > 0) { - for (const project of ctx.collectedInputContext.workspace.projects) { - if (project.dirFromWorkspacePath == null) continue - const projectRules = applySubSeriesGlobPrefix( - filterRulesByProjectConfig( - rules.filter(r => this.normalizeRuleScope(r) === 'project'), - project.projectConfig - ), - project.projectConfig - ) - if (projectRules.length === 0) continue - const dirPath = path.join(project.dirFromWorkspacePath.path, PROJECT_RULES_DIR, RULES_SUBDIR) - results.push(this.createRelativePath(dirPath, project.dirFromWorkspacePath.basePath, () => RULES_SUBDIR)) - } - } - return results - } - - override async canWrite(ctx: OutputWriteContext): Promise { - if ((ctx.collectedInputContext.rules?.length ?? 
0) > 0) return true - return super.canWrite(ctx) - } - - override async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - for (const project of ctx.collectedInputContext.workspace.projects) { - if (project.dirFromWorkspacePath == null) continue - - const projectDir = project.dirFromWorkspacePath - const {projectConfig} = project - const basePath = path.join(projectDir.basePath, projectDir.path, PROJECT_RULES_DIR) - - if (project.rootMemoryPrompt != null) { - const result = await this.writePromptFile(ctx, projectDir, project.rootMemoryPrompt.content as string, `project:${project.name}/root`) - fileResults.push(result) - } - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - const childResult = await this.writePromptFile(ctx, child.dir, child.content as string, `project:${project.name}/child:${child.workingChildDirectoryPath?.path ?? 'unknown'}`) - fileResults.push(childResult) - } - } - - if (this.supportsCommands && ctx.collectedInputContext.commands != null) { - const filteredCommands = filterCommandsByProjectConfig(ctx.collectedInputContext.commands, projectConfig) - for (const cmd of filteredCommands) { - const cmdResults = await this.writeCommand(ctx, basePath, cmd) - fileResults.push(...cmdResults) - } - } - - if (this.supportsSubAgents && ctx.collectedInputContext.subAgents != null) { - const filteredSubAgents = filterSubAgentsByProjectConfig(ctx.collectedInputContext.subAgents, projectConfig) - for (const agent of filteredSubAgents) { - const agentResults = await this.writeSubAgent(ctx, basePath, agent) - fileResults.push(...agentResults) - } - } - - if (this.supportsSkills && ctx.collectedInputContext.skills != null) { - const filteredSkills = filterSkillsByProjectConfig(ctx.collectedInputContext.skills, projectConfig) - for (const skill of filteredSkills) { - const skillResults = await this.writeSkill(ctx, basePath, skill) 
- fileResults.push(...skillResults) - } - } - } - - const {rules} = ctx.collectedInputContext - if (rules != null && rules.length > 0) { - for (const project of ctx.collectedInputContext.workspace.projects) { - if (project.dirFromWorkspacePath == null) continue - const projectRules = applySubSeriesGlobPrefix( - filterRulesByProjectConfig( - rules.filter(r => this.normalizeRuleScope(r) === 'project'), - project.projectConfig - ), - project.projectConfig - ) - if (projectRules.length === 0) continue - const rulesDir = path.join(project.dirFromWorkspacePath.basePath, project.dirFromWorkspacePath.path, PROJECT_RULES_DIR, RULES_SUBDIR) - for (const rule of projectRules) { - fileResults.push(await this.writeFile(ctx, path.join(rulesDir, this.buildRuleFileName(rule)), this.buildRuleContent(rule), 'rule')) - } - } - } - - return {files: fileResults, dirs: dirResults} - } -} diff --git a/cli/src/plugins/plugin-output-shared/AbstractOutputPlugin.ts b/cli/src/plugins/plugin-output-shared/AbstractOutputPlugin.ts deleted file mode 100644 index cebcf46e..00000000 --- a/cli/src/plugins/plugin-output-shared/AbstractOutputPlugin.ts +++ /dev/null @@ -1,701 +0,0 @@ -import type {Buffer} from 'node:buffer' -import type {CleanEffectHandler, CommandPrompt, CommandSeriesPluginOverride, EffectRegistration, EffectResult, ILogger, OutputCleanContext, OutputPlugin, OutputPluginContext, OutputWriteContext, Project, RegistryOperationResult, RulePrompt, RuleScope, SkillPrompt, WriteEffectHandler, WriteResult, WriteResults} from '../plugin-shared' - -import type {Path, ProjectConfig, RegistryData, RelativePath} from '../plugin-shared/types' -import type {RegistryWriter} from './registry/RegistryWriter' -import * as fs from 'node:fs' -import * as os from 'node:os' -import * as path from 'node:path' -import process from 'node:process' -import { - createFileRelativePath as deskCreateFileRelativePath, - createRelativePath as deskCreateRelativePath, - createSymlink as deskCreateSymlink, - ensureDir 
as deskEnsureDir, - isSymlink as deskIsSymlink, - lstatSync as deskLstatSync, - removeSymlink as deskRemoveSymlink, - writeFileSync as deskWriteFileSync -} from '@truenine/desk-paths' -import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' -import { - AbstractPlugin, - FilePathKind, - PluginKind -} from '../plugin-shared' - -/** - * Options for building skill front matter - */ -export interface SkillFrontMatterOptions { - readonly includeTools?: boolean - readonly toolFormat?: 'array' | 'string' - readonly additionalFields?: Record -} - -/** - * Options for building rule content - */ -export interface RuleContentOptions { - readonly fileExtension: '.mdc' | '.md' - readonly alwaysApply: boolean - readonly globJoinPattern: ', ' | '|' | string - readonly frontMatterFormatter?: (globs: string) => unknown - readonly additionalFrontMatter?: Record -} - -/** - * Options for executing write operations with dry-run support - */ -export interface WriteOperationOptions { - readonly ctx: OutputWriteContext - readonly type: string - readonly fullPath: string - readonly relativePath: RelativePath - readonly label?: string | undefined -} - -/** - * Context for error handling - */ -export interface ErrorContext { - readonly action: string - readonly path?: string - readonly [key: string]: unknown -} - -/** - * Options for transforming command names in output filenames. - * Used by transformCommandName method to control prefix handling. - */ -export interface CommandNameTransformOptions { - readonly includeSeriesPrefix?: boolean - readonly seriesSeparator?: string -} - -/** - * Options for configuring AbstractOutputPlugin subclasses. - */ -export interface AbstractOutputPluginOptions { - globalConfigDir?: string - - outputFileName?: string - - dependsOn?: readonly string[] - - indexignore?: string -} - -/** - * Options for combining global content with project content. 
- */ -export interface CombineOptions { - separator?: string - - skipIfEmpty?: boolean - - position?: 'before' | 'after' -} - -export abstract class AbstractOutputPlugin extends AbstractPlugin implements OutputPlugin { - protected readonly globalConfigDir: string - - protected readonly outputFileName: string - - protected readonly indexignore: string | undefined - - private readonly registryWriterCache: Map> = new Map() - - private readonly writeEffects: EffectRegistration[] = [] - - private readonly cleanEffects: EffectRegistration[] = [] - - protected constructor(name: string, options?: AbstractOutputPluginOptions) { - super(name, PluginKind.Output, options?.dependsOn) - this.globalConfigDir = options?.globalConfigDir ?? '' - this.outputFileName = options?.outputFileName ?? '' - this.indexignore = options?.indexignore - } - - protected resolvePromptSourceProjectConfig(ctx: OutputPluginContext | OutputWriteContext): ProjectConfig | undefined { - const {projects} = ctx.collectedInputContext.workspace - const promptSource = projects.find(p => p.isPromptSourceProject === true) - return promptSource?.projectConfig ?? projects[0]?.projectConfig - } - - protected registerWriteEffect(name: string, handler: WriteEffectHandler): void { - this.writeEffects.push({name, handler}) - } - - protected registerCleanEffect(name: string, handler: CleanEffectHandler): void { - this.cleanEffects.push({name, handler}) - } - - protected async executeWriteEffects(ctx: OutputWriteContext): Promise { - const results: EffectResult[] = [] - - for (const effect of this.writeEffects) { - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'effect', name: effect.name}) - results.push({success: true, description: `Would execute write effect: ${effect.name}`}) - continue - } - - try { - const result = await effect.handler(ctx) - if (result.success) this.log.trace({action: 'effect', name: effect.name, status: 'success'}) - else { - const errorMsg = result.error instanceof Error ? 
result.error.message : String(result.error) - this.log.error({action: 'effect', name: effect.name, status: 'failed', error: errorMsg}) - } - results.push(result) - } - catch (error) { - const errorMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'effect', name: effect.name, status: 'failed', error: errorMsg}) - results.push({success: false, error: error as Error, description: `Write effect failed: ${effect.name}`}) - } - } - - return results - } - - protected async executeCleanEffects(ctx: OutputCleanContext): Promise { - const results: EffectResult[] = [] - - for (const effect of this.cleanEffects) { - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'effect', name: effect.name}) - results.push({success: true, description: `Would execute clean effect: ${effect.name}`}) - continue - } - - try { - const result = await effect.handler(ctx) - if (result.success) this.log.trace({action: 'effect', name: effect.name, status: 'success'}) - else { - const errorMsg = result.error instanceof Error ? result.error.message : String(result.error) - this.log.error({action: 'effect', name: effect.name, status: 'failed', error: errorMsg}) - } - results.push(result) - } - catch (error) { - const errorMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'effect', name: effect.name, status: 'failed', error: errorMsg}) - results.push({success: false, error: error as Error, description: `Clean effect failed: ${effect.name}`}) - } - } - - return results - } - - protected isRelativePath(p: Path): p is RelativePath { - return p.pathKind === FilePathKind.Relative - } - - protected toRelativePath(p: Path): RelativePath { - if (this.isRelativePath(p)) return p - return { // Fallback for non-relative paths - pathKind: FilePathKind.Relative, - path: p.path, - basePath: '', - getDirectoryName: p.getDirectoryName, - getAbsolutePath: () => p.path - } - } - - protected resolveFullPath(targetPath: Path, outputFileName?: string): string { - let dirPath: string - if (targetPath.pathKind === FilePathKind.Absolute) dirPath = targetPath.path - else if (this.isRelativePath(targetPath)) dirPath = path.resolve(targetPath.basePath, targetPath.path) - else dirPath = path.resolve(process.cwd(), targetPath.path) - - const fileName = outputFileName ?? 
this.outputFileName // Append the output file name if provided or if default is set - if (fileName) return path.join(dirPath, fileName) - return dirPath - } - - protected createRelativePath( - pathStr: string, - basePath: string, - dirNameFn: () => string - ): RelativePath { - return deskCreateRelativePath(pathStr, basePath, dirNameFn) - } - - protected createFileRelativePath(dir: RelativePath, fileName: string): RelativePath { - return deskCreateFileRelativePath(dir, fileName) - } - - protected getGlobalConfigDir(): string { - return path.join(this.getHomeDir(), this.globalConfigDir) - } - - protected getHomeDir(): string { - return os.homedir() - } - - protected joinPath(...segments: string[]): string { - return path.join(...segments) - } - - protected resolvePath(...segments: string[]): string { - return path.resolve(...segments) - } - - protected dirname(p: string): string { - return path.dirname(p) - } - - protected basename(p: string, ext?: string): string { - return path.basename(p, ext) - } - - protected writeFileSync(filePath: string, content: string, encoding: BufferEncoding = 'utf8'): void { - deskWriteFileSync(filePath, content, encoding) - } - - protected writeFileSyncBuffer(filePath: string, buffer: Buffer): void { - deskWriteFileSync(filePath, buffer) - } - - protected ensureDirectory(dir: string): void { - deskEnsureDir(dir) - } - - protected existsSync(p: string): boolean { - return fs.existsSync(p) - } - - protected lstatSync(p: string): fs.Stats { - return deskLstatSync(p) - } - - protected isSymlink(p: string): boolean { - return deskIsSymlink(p) - } - - protected createSymlink(targetPath: string, symlinkPath: string, type: 'file' | 'dir' = 'dir'): void { - deskCreateSymlink(targetPath, symlinkPath, type) - } - - protected removeSymlink(symlinkPath: string): void { - deskRemoveSymlink(symlinkPath) - } - - protected async writeDirectorySymlink( - ctx: OutputWriteContext, - targetPath: string, - symlinkPath: string, - label: string - ): Promise { 
- const dir = path.dirname(symlinkPath) - const linkName = path.basename(symlinkPath) - const relativePath: RelativePath = deskCreateRelativePath(linkName, dir, () => path.basename(dir)) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'symlink', target: targetPath, link: symlinkPath, label}) - return {path: relativePath, success: true, skipped: false} - } - - try { - this.createSymlink(targetPath, symlinkPath, 'dir') - this.log.trace({action: 'write', type: 'symlink', target: targetPath, link: symlinkPath, label}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'symlink', target: targetPath, link: symlinkPath, label, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - protected readdirSync(dir: string, options: {withFileTypes: true}): fs.Dirent[] - protected readdirSync(dir: string): string[] - protected readdirSync(dir: string, options?: {withFileTypes?: boolean}): fs.Dirent[] | string[] { - if (options?.withFileTypes === true) return fs.readdirSync(dir, {withFileTypes: true}) - return fs.readdirSync(dir) - } - - protected getIgnoreOutputPath(): string | undefined { - if (this.indexignore == null) return void 0 - return this.indexignore - } - - protected registerProjectIgnoreOutputFiles(projects: readonly Project[]): RelativePath[] { - const outputPath = this.getIgnoreOutputPath() - if (outputPath == null) return [] - - const results: RelativePath[] = [] - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - if (project.isPromptSourceProject === true) continue - - const filePath = path.join(projectDir.path, outputPath) - results.push({ - pathKind: FilePathKind.Relative, - path: filePath, - basePath: projectDir.basePath, - getDirectoryName: () => path.basename(projectDir.path), - getAbsolutePath: 
() => path.join(projectDir.basePath, filePath) - }) - } - - return results - } - - protected async writeProjectIgnoreFiles(ctx: OutputWriteContext): Promise { - const outputPath = this.getIgnoreOutputPath() - if (outputPath == null) return [] - - const {workspace, aiAgentIgnoreConfigFiles} = ctx.collectedInputContext - const results: WriteResult[] = [] - - if (aiAgentIgnoreConfigFiles == null || aiAgentIgnoreConfigFiles.length === 0) return results - - const ignoreFile = aiAgentIgnoreConfigFiles.find(file => file.fileName === this.indexignore) - if (ignoreFile == null) return results - - for (const project of workspace.projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - if (project.isPromptSourceProject === true) continue - - const label = `project:${project.name ?? 'unknown'}/${ignoreFile.fileName}` - const filePath = path.join(projectDir.path, outputPath) - const fullPath = path.join(projectDir.basePath, filePath) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: filePath, - basePath: projectDir.basePath, - getDirectoryName: () => path.basename(projectDir.path), - getAbsolutePath: () => fullPath - } - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'ignoreFile', path: fullPath, label}) - results.push({path: relativePath, success: true, skipped: false}) - continue - } - - try { - fs.mkdirSync(path.dirname(fullPath), {recursive: true}) - fs.writeFileSync(fullPath, ignoreFile.content, 'utf8') - this.log.trace({action: 'write', type: 'ignoreFile', path: fullPath, label}) - results.push({path: relativePath, success: true}) - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'ignoreFile', path: fullPath, label, error: errMsg}) - results.push({path: relativePath, success: false, error: error as Error}) - } - } - - return results - } - - protected async writeFile( - ctx: OutputWriteContext, - fullPath: string, - content: string, - label: string - ): Promise { - const dir = path.dirname(fullPath) // Create a relative path for the result - const fileName = path.basename(fullPath) - const relativePath: RelativePath = deskCreateRelativePath(fileName, dir, () => path.basename(dir)) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'file', path: fullPath, label}) - return {path: relativePath, success: true, skipped: false} - } - - try { - this.ensureDirectory(dir) // Ensure parent directory exists before writing - deskWriteFileSync(fullPath, content) - this.log.trace({action: 'write', type: 'file', path: fullPath, label}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'file', path: fullPath, label, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - protected async writePromptFile( - ctx: OutputWriteContext, - targetPath: Path, - content: string, - label: string - ): Promise { - const fullPath = this.resolveFullPath(targetPath) - const relativePath = this.toRelativePath(targetPath) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'promptFile', path: fullPath, label}) - return {path: relativePath, success: true, skipped: false} - } - - try { - deskWriteFileSync(fullPath, content) - this.log.trace({action: 'write', type: 'promptFile', path: fullPath, label}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'promptFile', path: fullPath, label, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - protected buildMarkdownContent(content: string, frontMatter?: Record): string { - return buildMarkdownWithFrontMatter(frontMatter, content) - } - - protected buildMarkdownContentWithRaw( - content: string, - frontMatter?: Record, - rawFrontMatter?: string - ): string { - if (frontMatter != null && Object.keys(frontMatter).length > 0) return buildMarkdownWithFrontMatter(frontMatter, content) // If we have parsed front matter, use it - - if (rawFrontMatter != null && rawFrontMatter.length > 0) return `---\n${rawFrontMatter}\n---\n${content}` // If we have raw front matter but parsing failed, use raw - - return content // No front matter - } - - protected extractGlobalMemoryContent(ctx: OutputWriteContext): string | undefined { - return ctx.collectedInputContext.globalMemory?.content as string | undefined - } - - protected combineGlobalWithContent( - globalContent: string | undefined, - projectContent: string, - options?: CombineOptions - ): string { - const { - separator = '\n\n', - skipIfEmpty = true, - position = 'before' - } = options ?? {} - - if (skipIfEmpty && (globalContent == null || globalContent.trim().length === 0)) return projectContent // Skip if global content is undefined/null or empty/whitespace when skipIfEmpty is true - - const effectiveGlobalContent = globalContent ?? 
'' // If global content is null/undefined but skipIfEmpty is false, treat as empty string - - if (position === 'after') return `${projectContent}${separator}${effectiveGlobalContent}` // Combine based on position - - return `${effectiveGlobalContent}${separator}${projectContent}` // Default: 'before' - } - - protected transformCommandName( - cmd: CommandPrompt, - options?: CommandNameTransformOptions - ): string { - const {includeSeriesPrefix = true, seriesSeparator = '-'} = options ?? {} - - if (!includeSeriesPrefix || cmd.commandPrefix == null) return `${cmd.commandName}.md` // If prefix should not be included or prefix is not present, return just commandName - - return `${cmd.commandPrefix}${seriesSeparator}${cmd.commandName}.md` - } - - protected getCommandSeriesOptions(ctx: OutputWriteContext): CommandSeriesPluginOverride { - const globalOptions = ctx.pluginOptions?.commandSeriesOptions - const pluginOverride = globalOptions?.pluginOverrides?.[this.name] - - const includeSeriesPrefix = pluginOverride?.includeSeriesPrefix ?? globalOptions?.includeSeriesPrefix // Only include properties that have defined values to satisfy exactOptionalPropertyTypes // Plugin-specific overrides take precedence over global settings - const seriesSeparator = pluginOverride?.seriesSeparator - - if (includeSeriesPrefix != null && seriesSeparator != null) return {includeSeriesPrefix, seriesSeparator} // Build result object conditionally to avoid assigning undefined to readonly properties - if (includeSeriesPrefix != null) return {includeSeriesPrefix} - if (seriesSeparator != null) return {seriesSeparator} - return {} - } - - protected getTransformOptionsFromContext( - ctx: OutputWriteContext, - additionalOptions?: CommandNameTransformOptions - ): CommandNameTransformOptions { - const seriesOptions = this.getCommandSeriesOptions(ctx) - - const includeSeriesPrefix = seriesOptions.includeSeriesPrefix ?? 
additionalOptions?.includeSeriesPrefix // Only include properties that have defined values to satisfy exactOptionalPropertyTypes // Merge: additionalOptions (plugin defaults) <- seriesOptions (config overrides) - const seriesSeparator = seriesOptions.seriesSeparator ?? additionalOptions?.seriesSeparator - - if (includeSeriesPrefix != null && seriesSeparator != null) return {includeSeriesPrefix, seriesSeparator} // Build result object conditionally to avoid assigning undefined to readonly properties - if (includeSeriesPrefix != null) return {includeSeriesPrefix} - if (seriesSeparator != null) return {seriesSeparator} - return {} - } - - protected shouldSkipDueToPlugin(ctx: OutputWriteContext, precedingPluginName: string): boolean { - const registeredPlugins = ctx.registeredPluginNames - if (registeredPlugins == null) return false - return registeredPlugins.includes(precedingPluginName) - } - - async onWriteComplete(ctx: OutputWriteContext, results: WriteResults): Promise { - const success = results.files.filter(r => r.success).length - const skipped = results.files.filter(r => r.skipped).length - const failed = results.files.filter(r => !r.success && !r.skipped).length - - this.log.trace({action: ctx.dryRun === true ? 
'dryRun' : 'complete', type: 'writeSummary', success, skipped, failed}) - - await this.executeWriteEffects(ctx) // Execute registered write effects - } - - async onCleanComplete(ctx: OutputCleanContext): Promise { - await this.executeCleanEffects(ctx) // Execute registered clean effects - } - - protected getRegistryWriter< - TEntry, - TRegistry extends RegistryData, - T extends RegistryWriter - >( - WriterClass: new (logger: ILogger) => T - ): T { - const cacheKey = WriterClass.name - - const cached = this.registryWriterCache.get(cacheKey) // Check cache first - if (cached != null) return cached as T - - const writer = new WriterClass(this.log) // Create new instance and cache it - this.registryWriterCache.set(cacheKey, writer as RegistryWriter) - return writer - } - - protected async registerInRegistry< - TEntry, - TRegistry extends RegistryData - >( - writer: RegistryWriter, - entries: readonly TEntry[], - ctx: OutputWriteContext - ): Promise { - return writer.register(entries, ctx.dryRun) - } - - protected normalizeRuleScope(rule: RulePrompt): RuleScope { - return rule.scope ?? 'project' - } - - protected handleError( - error: unknown, - context: ErrorContext - ): {success: false, error: Error} { - const errorMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({...context, error: errorMsg}) - return {success: false, error: error as Error} - } - - protected async executeWriteOperation( - options: WriteOperationOptions, - execute: () => Promise - ): Promise { - const {ctx, type, fullPath, relativePath, label} = options - - if (ctx.dryRun === true) { // Handle dry-run mode - this.log.trace({action: 'dryRun', type, path: fullPath, label}) - return {path: relativePath, success: true, skipped: false} - } - - try { // Execute with standardized error handling - const result = await execute() - this.log.trace({action: 'write', type, path: fullPath, label}) - return result - } catch (error) { - return {...this.handleError(error, {action: 'write', type, path: fullPath, label}), path: relativePath} - } - } - - protected buildSkillFrontMatter( - skill: SkillPrompt, - options?: SkillFrontMatterOptions - ): Record { - const fm = skill.yamlFrontMatter - const result: Record = { - name: fm.name, - description: fm.description - } - - if ('displayName' in fm && fm.displayName != null) { // Conditionally add optional fields - result['displayName'] = fm.displayName - } - if ('keywords' in fm && fm.keywords != null && fm.keywords.length > 0) result['keywords'] = fm.keywords - if ('author' in fm && fm.author != null) result['author'] = fm.author - if ('version' in fm && fm.version != null) result['version'] = fm.version - - const includeTools = options?.includeTools ?? true // Handle tools based on options - if (includeTools && 'allowTools' in fm && fm.allowTools != null && fm.allowTools.length > 0) { - const toolFormat = options?.toolFormat ?? 'array' - result['allowTools'] = toolFormat === 'string' ? 
fm.allowTools.join(',') : fm.allowTools - } - - if (options?.additionalFields != null) { // Add any additional custom fields - Object.assign(result, options.additionalFields) - } - - return result - } - - protected buildRuleContent( - rule: RulePrompt, - options: RuleContentOptions - ): string { - const globsFormatted = rule.globs.length > 0 - ? rule.globs.join(options.globJoinPattern) - : '' - - const fmData: Record = { - alwaysApply: options.alwaysApply, - globs: options.frontMatterFormatter - ? options.frontMatterFormatter(globsFormatted) - : globsFormatted, - ...options.additionalFrontMatter - } - - return buildMarkdownWithFrontMatter(fmData, rule.content) - } - - protected buildRuleFileName( - rule: RulePrompt, - prefix: string = 'rule-' - ): string { - return `${prefix}${rule.series}-${rule.ruleName}.mdc` - } - - protected async writeFileWithHandling( - ctx: OutputWriteContext, - fullPath: string, - content: string, - options: { - type: string - label?: string - relativePath: RelativePath - } - ): Promise { - const result = await this.executeWriteOperation( - { - ctx, - type: options.type, - fullPath, - relativePath: options.relativePath, - label: options.label - }, - async () => { - this.ensureDirectory(path.dirname(fullPath)) - this.writeFileSync(fullPath, content) - return {path: options.relativePath, success: true as const} - } - ) - - if ('success' in result && !result.success) { // If executeWriteOperation returned a WriteResult (error case), pass it through - return result - } - - return {path: options.relativePath, success: true} - } -} diff --git a/cli/src/plugins/plugin-output-shared/BaseCLIOutputPlugin.ts b/cli/src/plugins/plugin-output-shared/BaseCLIOutputPlugin.ts deleted file mode 100644 index eacf9fe8..00000000 --- a/cli/src/plugins/plugin-output-shared/BaseCLIOutputPlugin.ts +++ /dev/null @@ -1,551 +0,0 @@ -import type { - CommandPrompt, - OutputPluginContext, - OutputWriteContext, - RulePrompt, - RuleScope, - SkillPrompt, - SubAgentPrompt, - 
WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import type {AbstractOutputPluginOptions} from './AbstractOutputPlugin' -import * as path from 'node:path' -import {writeFileSync as deskWriteFileSync} from '@truenine/desk-paths' -import {mdxToMd} from '@truenine/md-compiler' -import {GlobalScopeCollector} from '@truenine/plugin-input-shared' -import {AbstractOutputPlugin} from './AbstractOutputPlugin' -import {filterCommandsByProjectConfig, filterSkillsByProjectConfig, filterSubAgentsByProjectConfig} from './utils' - -export interface BaseCLIOutputPluginOptions extends AbstractOutputPluginOptions { - readonly commandsSubDir?: string - readonly agentsSubDir?: string - readonly skillsSubDir?: string - - readonly supportsCommands?: boolean - - readonly supportsSubAgents?: boolean - - readonly supportsSkills?: boolean - - readonly toolPreset?: string -} - -export abstract class BaseCLIOutputPlugin extends AbstractOutputPlugin { - protected readonly commandsSubDir: string - protected readonly agentsSubDir: string - protected readonly skillsSubDir: string - protected readonly supportsCommands: boolean - protected readonly supportsSubAgents: boolean - protected readonly supportsSkills: boolean - protected readonly toolPreset?: string - - constructor(name: string, options: BaseCLIOutputPluginOptions) { - super(name, options) - this.commandsSubDir = options.commandsSubDir ?? 'commands' - this.agentsSubDir = options.agentsSubDir ?? 'agents' - this.skillsSubDir = options.skillsSubDir ?? 'skills' - this.supportsCommands = options.supportsCommands ?? true - this.supportsSubAgents = options.supportsSubAgents ?? true - this.supportsSkills = options.supportsSkills ?? 
true - if (options.toolPreset !== void 0) this.toolPreset = options.toolPreset - } - - async registerGlobalOutputDirs(_ctx: OutputPluginContext): Promise { - return [] - } - - async registerProjectOutputDirs(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - - const subdirs: string[] = [] // Subdirectories might be needed there too // Most CLI tools store project-local config in a hidden folder .toolname - if (this.supportsCommands) subdirs.push(this.commandsSubDir) - if (this.supportsSubAgents) subdirs.push(this.agentsSubDir) - if (this.supportsSkills) subdirs.push(this.skillsSubDir) - - this.log.debug('registerProjectOutputDirs', { - plugin: this.name, - projectCount: projects.length, - supportsCommands: this.supportsCommands, - supportsSubAgents: this.supportsSubAgents, - supportsSkills: this.supportsSkills, - subdirs, - commandsCount: ctx.collectedInputContext.commands?.length ?? 0, - subAgentsCount: ctx.collectedInputContext.subAgents?.length ?? 0, - skillsCount: ctx.collectedInputContext.skills?.length ?? 
0 - }) - - if (subdirs.length === 0) { - this.log.debug('no subdirs to register', {plugin: this.name}) - return [] - } - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) { - this.log.debug('project has no dirFromWorkspacePath', {plugin: this.name, projectName: project.name}) - continue - } - - for (const subdir of subdirs) { - const dirPath = path.join(project.dirFromWorkspacePath.path, this.globalConfigDir, subdir) // Assuming globalConfigDir is something like .claude - results.push(this.createRelativePath(dirPath, project.dirFromWorkspacePath.basePath, () => subdir)) - this.log.debug('registered output dir', {plugin: this.name, project: project.name, subdir, dirPath}) - } - } - - this.log.debug('registerProjectOutputDirs complete', {plugin: this.name, dirCount: results.length}) - return results - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - - this.log.debug('registerProjectOutputFiles start', { - plugin: this.name, - projectCount: projects.length, - commandsAvailable: ctx.collectedInputContext.commands != null, - commandsCount: ctx.collectedInputContext.commands?.length ?? 0, - subAgentsAvailable: ctx.collectedInputContext.subAgents != null, - subAgentsCount: ctx.collectedInputContext.subAgents?.length ?? 0, - skillsAvailable: ctx.collectedInputContext.skills != null, - skillsCount: ctx.collectedInputContext.skills?.length ?? 0 - }) - - for (const project of projects) { - this.log.debug('processing project', { - plugin: this.name, - projectName: project.name, - hasRootMemory: project.rootMemoryPrompt != null, - childMemoryCount: project.childMemoryPrompts?.length ?? 
0, - hasDirFromWorkspace: project.dirFromWorkspacePath != null, - projectConfig: project.projectConfig - }) - - if (project.rootMemoryPrompt != null && project.dirFromWorkspacePath != null) { // Root memory file - results.push(this.createFileRelativePath(project.dirFromWorkspacePath, this.outputFileName)) - } - - if (project.childMemoryPrompts != null) { // Child memory files - for (const child of project.childMemoryPrompts) { - if (child.dir != null && this.isRelativePath(child.dir)) results.push(this.createFileRelativePath(child.dir, this.outputFileName)) - } - } - - if (project.dirFromWorkspacePath == null) { - this.log.debug('project has no dirFromWorkspacePath, skipping', {plugin: this.name, projectName: project.name}) - continue - } - - const {projectConfig} = project - const basePath = path.join(project.dirFromWorkspacePath.path, this.globalConfigDir) - const transformOptions = {includeSeriesPrefix: true} as const - - if (this.supportsCommands && ctx.collectedInputContext.commands != null) { - const allCommands = ctx.collectedInputContext.commands - const filteredCommands = filterCommandsByProjectConfig(allCommands, projectConfig) - this.log.debug('filtering commands', { - plugin: this.name, - projectName: project.name, - totalCommands: allCommands.length, - filteredCommands: filteredCommands.length, - projectConfig - }) - for (const cmd of filteredCommands) { - const fileName = this.transformCommandName(cmd, transformOptions) - results.push(this.createRelativePath(path.join(basePath, this.commandsSubDir, fileName), project.dirFromWorkspacePath.basePath, () => this.commandsSubDir)) - this.log.debug('registered command file', {plugin: this.name, project: project.name, fileName}) - } - } else { - this.log.debug('commands skipped', { - plugin: this.name, - supportsCommands: this.supportsCommands, - hasCommands: ctx.collectedInputContext.commands != null - }) - } - - if (this.supportsSubAgents && ctx.collectedInputContext.subAgents != null) { - const 
allSubAgents = ctx.collectedInputContext.subAgents - const filteredSubAgents = filterSubAgentsByProjectConfig(allSubAgents, projectConfig) - this.log.debug('filtering subAgents', { - plugin: this.name, - projectName: project.name, - totalSubAgents: allSubAgents.length, - filteredSubAgents: filteredSubAgents.length, - projectConfig - }) - for (const agent of filteredSubAgents) { - const fileName = agent.dir.path.replace(/\.mdx$/, '.md') - results.push(this.createRelativePath(path.join(basePath, this.agentsSubDir, fileName), project.dirFromWorkspacePath.basePath, () => this.agentsSubDir)) - this.log.debug('registered agent file', {plugin: this.name, project: project.name, fileName}) - } - } else { - this.log.debug('subAgents skipped', { - plugin: this.name, - supportsSubAgents: this.supportsSubAgents, - hasSubAgents: ctx.collectedInputContext.subAgents != null - }) - } - - if (this.supportsSkills && ctx.collectedInputContext.skills != null) { - const allSkills = ctx.collectedInputContext.skills - const filteredSkills = filterSkillsByProjectConfig(allSkills, projectConfig) - this.log.debug('filtering skills', { - plugin: this.name, - projectName: project.name, - totalSkills: allSkills.length, - filteredSkills: filteredSkills.length - }) - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter?.name ?? 
skill.dir.getDirectoryName() - const skillDir = path.join(basePath, this.skillsSubDir, skillName) - - results.push(this.createRelativePath(path.join(skillDir, 'SKILL.md'), project.dirFromWorkspacePath.basePath, () => skillName)) - - if (skill.childDocs != null) { - for (const refDoc of skill.childDocs) { - const refDocFileName = refDoc.dir.path.replace(/\.mdx$/, '.md') - const refDocPath = path.join(skillDir, refDocFileName) - results.push(this.createRelativePath(refDocPath, project.dirFromWorkspacePath.basePath, () => skillName)) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - const resourcePath = path.join(skillDir, resource.relativePath) - results.push(this.createRelativePath(resourcePath, project.dirFromWorkspacePath.basePath, () => skillName)) - } - } - } - } else { - this.log.debug('skills skipped', { - plugin: this.name, - supportsSkills: this.supportsSkills, - hasSkills: ctx.collectedInputContext.skills != null - }) - } - } - - this.log.debug('registerProjectOutputFiles complete', {plugin: this.name, fileCount: results.length}) - return results - } - - async registerGlobalOutputFiles(ctx: OutputPluginContext): Promise { - const {globalMemory} = ctx.collectedInputContext - if (globalMemory == null) return [] - - const globalDir = this.getGlobalConfigDir() - return [ - this.createRelativePath(this.outputFileName, globalDir, () => this.globalConfigDir) - ] - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {workspace, globalMemory, commands, subAgents, skills} = ctx.collectedInputContext - const hasProjectOutputs = workspace.projects.some( - p => p.rootMemoryPrompt != null || (p.childMemoryPrompts?.length ?? 0) > 0 - ) - const hasGlobalMemory = globalMemory != null - const hasProjectLevelCommands = this.supportsCommands && (commands?.length ?? 0) > 0 && workspace.projects.length > 0 - const hasProjectLevelSubAgents = this.supportsSubAgents && (subAgents?.length ?? 
0) > 0 && workspace.projects.length > 0 - const hasProjectLevelSkills = this.supportsSkills && (skills?.length ?? 0) > 0 && workspace.projects.length > 0 - - this.log.debug('canWrite check', { - plugin: this.name, - hasProjectOutputs, - hasGlobalMemory, - hasProjectLevelCommands, - hasProjectLevelSubAgents, - hasProjectLevelSkills, - projectCount: workspace.projects.length, - commandsCount: commands?.length ?? 0, - subAgentsCount: subAgents?.length ?? 0, - skillsCount: skills?.length ?? 0, - supportsCommands: this.supportsCommands, - supportsSubAgents: this.supportsSubAgents, - supportsSkills: this.supportsSkills - }) - - if (hasProjectOutputs || hasGlobalMemory || hasProjectLevelCommands || hasProjectLevelSubAgents || hasProjectLevelSkills) return true - - this.log.trace({action: 'skip', reason: 'noOutputs'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - this.log.debug('writeProjectOutputs start', { - plugin: this.name, - projectCount: projects.length, - commandsCount: ctx.collectedInputContext.commands?.length ?? 0, - subAgentsCount: ctx.collectedInputContext.subAgents?.length ?? 0, - skillsCount: ctx.collectedInputContext.skills?.length ?? 0 - }) - - for (const project of projects) { - const projectName = project.name ?? 
'unknown' - const projectDir = project.dirFromWorkspacePath - - this.log.debug('writing project outputs', { - plugin: this.name, - projectName, - hasProjectDir: projectDir != null, - projectConfig: project.projectConfig - }) - - if (projectDir == null) { - this.log.debug('project has no dirFromWorkspacePath, skipping', {plugin: this.name, projectName}) - continue - } - - if (project.rootMemoryPrompt != null) { - const result = await this.writePromptFile(ctx, projectDir, project.rootMemoryPrompt.content as string, `project:${projectName}/root`) - fileResults.push(result) - } - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - const childResult = await this.writePromptFile(ctx, child.dir, child.content as string, `project:${projectName}/child:${child.workingChildDirectoryPath?.path ?? 'unknown'}`) - fileResults.push(childResult) - } - } - - const {projectConfig} = project - const basePath = path.join(projectDir.basePath, projectDir.path, this.globalConfigDir) - - if (this.supportsCommands && ctx.collectedInputContext.commands != null) { - const allCommands = ctx.collectedInputContext.commands - const filteredCommands = filterCommandsByProjectConfig(allCommands, projectConfig) - this.log.debug('writing commands', { - plugin: this.name, - projectName, - totalCommands: allCommands.length, - filteredCommands: filteredCommands.length, - projectConfig - }) - for (const cmd of filteredCommands) { - const cmdResults = await this.writeCommand(ctx, basePath, cmd) - fileResults.push(...cmdResults) - this.log.debug('wrote command', {plugin: this.name, projectName, commandName: cmd.commandName, success: cmdResults.every(r => r.success)}) - } - } else { - this.log.debug('commands not written', { - plugin: this.name, - supportsCommands: this.supportsCommands, - hasCommands: ctx.collectedInputContext.commands != null - }) - } - - if (this.supportsSubAgents && ctx.collectedInputContext.subAgents != null) { - const allSubAgents = 
ctx.collectedInputContext.subAgents - const filteredSubAgents = filterSubAgentsByProjectConfig(allSubAgents, projectConfig) - this.log.debug('writing subAgents', { - plugin: this.name, - projectName, - totalSubAgents: allSubAgents.length, - filteredSubAgents: filteredSubAgents.length, - projectConfig - }) - for (const agent of filteredSubAgents) { - const agentResults = await this.writeSubAgent(ctx, basePath, agent) - fileResults.push(...agentResults) - this.log.debug('wrote subAgent', {plugin: this.name, projectName, agentPath: agent.dir.path, success: agentResults.every(r => r.success)}) - } - } else { - this.log.debug('subAgents not written', { - plugin: this.name, - supportsSubAgents: this.supportsSubAgents, - hasSubAgents: ctx.collectedInputContext.subAgents != null - }) - } - - if (this.supportsSkills && ctx.collectedInputContext.skills != null) { - const allSkills = ctx.collectedInputContext.skills - const filteredSkills = filterSkillsByProjectConfig(allSkills, projectConfig) - this.log.debug('writing skills', { - plugin: this.name, - projectName, - totalSkills: allSkills.length, - filteredSkills: filteredSkills.length - }) - for (const skill of filteredSkills) { - const skillResults = await this.writeSkill(ctx, basePath, skill) - fileResults.push(...skillResults) - this.log.debug('wrote skill', {plugin: this.name, projectName, skillName: skill.yamlFrontMatter?.name, success: skillResults.every(r => r.success)}) - } - } else { - this.log.debug('skills not written', { - plugin: this.name, - supportsSkills: this.supportsSkills, - hasSkills: ctx.collectedInputContext.skills != null - }) - } - } - - return {files: fileResults, dirs: dirResults} - } - - async writeGlobalOutputs(ctx: OutputWriteContext): Promise { - const {globalMemory} = ctx.collectedInputContext - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - if (globalMemory == null) return {files: fileResults, dirs: dirResults} - - const globalDir = this.getGlobalConfigDir() 
- const fullPath = path.join(globalDir, this.outputFileName) - const relativePath: RelativePath = this.createRelativePath(this.outputFileName, globalDir, () => this.globalConfigDir) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'globalMemory', path: fullPath}) - fileResults.push({ - path: relativePath, - success: true, - skipped: false - }) - } else { - try { - deskWriteFileSync(fullPath, globalMemory.content as string) - this.log.trace({action: 'write', type: 'globalMemory', path: fullPath}) - fileResults.push({path: relativePath, success: true}) - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'globalMemory', path: fullPath, error: errMsg}) - fileResults.push({path: relativePath, success: false, error: error as Error}) - } - } - - return {files: fileResults, dirs: dirResults} - } - - protected async writeCommand( - ctx: OutputWriteContext, - basePath: string, - cmd: CommandPrompt - ): Promise { - const transformOptions = this.getTransformOptionsFromContext(ctx) - const fileName = this.transformCommandName(cmd, transformOptions) - const targetDir = path.join(basePath, this.commandsSubDir) - const fullPath = path.join(targetDir, fileName) - - let compiledContent = cmd.content - let compiledFrontMatter = cmd.yamlFrontMatter - let useRecompiledFrontMatter = false - - if (cmd.rawMdxContent != null && this.toolPreset != null) { // Only recompile if we have raw content AND a tool preset is configured - this.log.debug('recompiling command with tool preset', { - file: cmd.dir.getAbsolutePath(), - toolPreset: this.toolPreset, - hasRawContent: true - }) - try { - // eslint-disable-next-line ts/no-unsafe-assignment - const scopeCollector = new GlobalScopeCollector({toolPreset: this.toolPreset as any}) // Cast to clean - const globalScope = scopeCollector.collect() - const result = await mdxToMd(cmd.rawMdxContent, {globalScope, extractMetadata: true, basePath: 
cmd.dir.basePath}) - compiledContent = result.content - compiledFrontMatter = result.metadata.fields as typeof cmd.yamlFrontMatter - useRecompiledFrontMatter = true - } - catch (e) { - this.log.warn('failed to recompile command, using default', { - file: cmd.dir.getAbsolutePath(), - error: e instanceof Error ? e.message : String(e) - }) - } - } - - const content = useRecompiledFrontMatter - ? this.buildMarkdownContent(compiledContent, compiledFrontMatter) - : this.buildMarkdownContentWithRaw(compiledContent, compiledFrontMatter, cmd.rawFrontMatter) - - return [await this.writeFile(ctx, fullPath, content, 'command')] - } - - protected async writeSubAgent( - ctx: OutputWriteContext, - basePath: string, - agent: SubAgentPrompt - ): Promise { - const fileName = agent.dir.path.replace(/\.mdx$/, '.md') - const targetDir = path.join(basePath, this.agentsSubDir) - const fullPath = path.join(targetDir, fileName) - - const content = this.buildMarkdownContentWithRaw( - agent.content, - agent.yamlFrontMatter, - agent.rawFrontMatter - ) - - return [await this.writeFile(ctx, fullPath, content, 'subAgent')] - } - - protected async writeSkill( - ctx: OutputWriteContext, - basePath: string, - skill: SkillPrompt - ): Promise { - const results: WriteResult[] = [] - const skillName = skill.yamlFrontMatter?.name ?? 
skill.dir.getDirectoryName() - const targetDir = path.join(basePath, this.skillsSubDir, skillName) - const fullPath = path.join(targetDir, 'SKILL.md') - - const content = this.buildMarkdownContentWithRaw( - skill.content as string, - skill.yamlFrontMatter, - skill.rawFrontMatter - ) - - const mainFileResult = await this.writeFile(ctx, fullPath, content, 'skill') - results.push(mainFileResult) - - if (skill.childDocs != null) { - for (const refDoc of skill.childDocs) { - const refResults = await this.writeSkillReferenceDocument(ctx, targetDir, skillName, refDoc, basePath) - results.push(...refResults) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - const refResults = await this.writeSkillResource(ctx, targetDir, skillName, resource, basePath) - results.push(...refResults) - } - } - - return results - } - - protected async writeSkillReferenceDocument( - ctx: OutputWriteContext, - skillDir: string, - _skillName: string, - refDoc: {dir: RelativePath, content: unknown}, - _basePath: string - ): Promise { - const fileName = refDoc.dir.path.replace(/\.mdx$/, '.md') - const fullPath = path.join(skillDir, fileName) - return [await this.writeFile(ctx, fullPath, refDoc.content as string, 'skillRefDoc')] - } - - protected async writeSkillResource( - ctx: OutputWriteContext, - skillDir: string, - _skillName: string, - resource: {relativePath: string, content: string}, - _basePath: string - ): Promise { - const fullPath = path.join(skillDir, resource.relativePath) - return [await this.writeFile(ctx, fullPath, resource.content, 'skillResource')] - } - - protected override normalizeRuleScope(rule: RulePrompt): RuleScope { - return rule.scope ?? 
'project' - } -} diff --git a/cli/src/plugins/plugin-output-shared/constants.ts b/cli/src/plugins/plugin-output-shared/constants.ts deleted file mode 100644 index a665c4d9..00000000 --- a/cli/src/plugins/plugin-output-shared/constants.ts +++ /dev/null @@ -1,122 +0,0 @@ -/** - * Constants for output plugins - * Centralizes hardcoded strings to improve maintainability and reduce duplication - */ - -/** - * File and directory names used across output plugins - */ -export const OutputFileNames = { - /** Default skill file name */ - SKILL: 'SKILL.md', - /** Cursor global rule file */ - CURSOR_GLOBAL_RULE: 'global.mdc', - /** Cursor project rule file */ - CURSOR_PROJECT_RULE: 'always.md', - /** MCP configuration file */ - MCP_CONFIG: 'mcp.json', - /** Claude Code project memory file */ - CLAUDE_MEMORY: 'CLAUDE.md', - /** Windsurf global rules file */ - WINDSURF_GLOBAL_RULE: 'global_rules.md' -} as const - -/** - * Prefixes used for file naming - */ -export const OutputPrefixes = { - /** Rule file prefix */ - RULE: 'rule-', - /** Child rule/glob prefix */ - CHILD_RULE: 'glob-' -} as const - -/** - * Subdirectory names used by output plugins - */ -export const OutputSubdirectories = { - /** Rules subdirectory */ - RULES: 'rules', - /** Commands subdirectory */ - COMMANDS: 'commands', - /** Skills subdirectory */ - SKILLS: 'skills', - /** Agents subdirectory */ - AGENTS: 'agents', - /** Cursor-specific skills subdirectory */ - CURSOR_SKILLS: 'skills-cursor' -} as const - -/** - * Front matter field names - */ -export const FrontMatterFields = { - /** Always apply flag */ - ALWAYS_APPLY: 'alwaysApply', - /** Globs pattern */ - GLOBS: 'globs', - /** Description field */ - DESCRIPTION: 'description', - /** Name field */ - NAME: 'name', - /** Trigger type */ - TRIGGER: 'trigger' -} as const - -/** - * File extensions - */ -export const FileExtensions = { - /** Markdown file */ - MD: '.md', - /** Markdown with cursor config */ - MDC: '.mdc', - /** MDX file */ - MDX: '.mdx', - 
/** JSON file */ - JSON: '.json' -} as const - -/** - * Global configuration directory names - */ -export const GlobalConfigDirs = { - /** Cursor config directory */ - CURSOR: '.cursor', - /** Claude Code config directory */ - CLAUDE: '.claude', - /** Windsurf/Codeium config directory */ - WINDSURF: '.codeium/windsurf', - /** Generic Windsurf rules directory */ - WINDSURF_RULES: '.windsurf' -} as const - -/** - * Ignore file names - */ -export const IgnoreFiles = { - /** Cursor ignore file */ - CURSOR: '.cursorignore', - /** Windsurf ignore file */ - WINDSURF: '.codeiumignore' -} as const - -/** - * Preserved skill names that should not be overwritten - */ -export const PreservedSkills = { - CURSOR: new Set([ - 'create-rule', - 'create-skill', - 'create-subagent', - 'migrate-to-skills', - 'update-cursor-settings' - ]) -} as const - -/** - * Tool preset identifiers - */ -export const ToolPresets = { - CLAUDE_CODE: 'claudeCode' -} as const diff --git a/cli/src/plugins/plugin-output-shared/index.ts b/cli/src/plugins/plugin-output-shared/index.ts deleted file mode 100644 index fce9d52c..00000000 --- a/cli/src/plugins/plugin-output-shared/index.ts +++ /dev/null @@ -1,54 +0,0 @@ -export { - AbstractOutputPlugin -} from './AbstractOutputPlugin' -export type { - AbstractOutputPluginOptions, - CombineOptions, - CommandNameTransformOptions, - ErrorContext, - RuleContentOptions, - SkillFrontMatterOptions, - WriteOperationOptions -} from './AbstractOutputPlugin' -export { - BaseCLIOutputPlugin -} from './BaseCLIOutputPlugin' -export type { - BaseCLIOutputPluginOptions -} from './BaseCLIOutputPlugin' -export { - FileExtensions, - FrontMatterFields, - GlobalConfigDirs, - IgnoreFiles, - OutputFileNames, - OutputPrefixes, - OutputSubdirectories, - PreservedSkills, - ToolPresets -} from './constants' -export { - McpConfigManager, - transformMcpConfigForCursor, - transformMcpConfigForOpencode -} from './McpConfigManager' -export type { - McpConfigFormat, - McpConfigTransformer, - 
McpServerEntry, - McpWriteResult, - TransformedMcpConfig -} from './McpConfigManager' -export { - applySubSeriesGlobPrefix, - filterCommandsByProjectConfig, - filterRulesByProjectConfig, - filterSkillsByProjectConfig, - filterSubAgentsByProjectConfig, - findAllGitRepos, - findGitModuleInfoDirs, - matchesSeries, - resolveEffectiveIncludeSeries, - resolveGitInfoDir, - resolveSubSeries -} from './utils' diff --git a/cli/src/plugins/plugin-output-shared/registry/index.ts b/cli/src/plugins/plugin-output-shared/registry/index.ts deleted file mode 100644 index 658667cd..00000000 --- a/cli/src/plugins/plugin-output-shared/registry/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export { - RegistryWriter -} from './RegistryWriter' diff --git a/cli/src/plugins/plugin-output-shared/utils/filters.ts b/cli/src/plugins/plugin-output-shared/utils/filters.ts deleted file mode 100644 index 02818c1e..00000000 --- a/cli/src/plugins/plugin-output-shared/utils/filters.ts +++ /dev/null @@ -1,71 +0,0 @@ -import type {CommandPrompt, RulePrompt, SeriName, SkillPrompt, SubAgentPrompt} from '../../plugin-shared' -import type {ProjectConfig} from '../../plugin-shared/types' -import {matchesSeries, resolveEffectiveIncludeSeries} from './seriesFilter' - -/** - * Interface for items that can be filtered by series name - */ -export interface SeriesFilterable { - readonly seriName?: SeriName -} - -/** - * Configuration path types for project config lookup - */ -export type FilterConfigPath = 'commands' | 'skills' | 'subAgents' | 'rules' - -export function filterByProjectConfig( - items: readonly T[], - projectConfig: ProjectConfig | undefined, - configPath: FilterConfigPath -): readonly T[] { - const effectiveSeries = resolveEffectiveIncludeSeries( - projectConfig?.includeSeries, - projectConfig?.[configPath]?.includeSeries - ) - return items.filter(item => matchesSeries(item.seriName, effectiveSeries)) -} - -/** - * Filter commands by project configuration - * @deprecated Use 
filterByProjectConfig(commands, config, 'commands') instead - */ -export function filterCommandsByProjectConfig( - commands: readonly CommandPrompt[], - projectConfig: ProjectConfig | undefined -): readonly CommandPrompt[] { - return filterByProjectConfig(commands, projectConfig, 'commands') -} - -/** - * Filter skills by project configuration - * @deprecated Use filterByProjectConfig(skills, config, 'skills') instead - */ -export function filterSkillsByProjectConfig( - skills: readonly SkillPrompt[], - projectConfig: ProjectConfig | undefined -): readonly SkillPrompt[] { - return filterByProjectConfig(skills, projectConfig, 'skills') -} - -/** - * Filter sub-agents by project configuration - * @deprecated Use filterByProjectConfig(subAgents, config, 'subAgents') instead - */ -export function filterSubAgentsByProjectConfig( - subAgents: readonly SubAgentPrompt[], - projectConfig: ProjectConfig | undefined -): readonly SubAgentPrompt[] { - return filterByProjectConfig(subAgents, projectConfig, 'subAgents') -} - -/** - * Filter rules by project configuration - * @deprecated Use filterByProjectConfig(rules, config, 'rules') instead - */ -export function filterRulesByProjectConfig( - rules: readonly RulePrompt[], - projectConfig: ProjectConfig | undefined -): readonly RulePrompt[] { - return filterByProjectConfig(rules, projectConfig, 'rules') -} diff --git a/cli/src/plugins/plugin-output-shared/utils/gitUtils.ts b/cli/src/plugins/plugin-output-shared/utils/gitUtils.ts deleted file mode 100644 index eace5421..00000000 --- a/cli/src/plugins/plugin-output-shared/utils/gitUtils.ts +++ /dev/null @@ -1,121 +0,0 @@ -import * as fs from 'node:fs' -import * as path from 'node:path' - -/** - * Resolves the actual `.git/info` directory for a given project path. - * Handles both regular git repos (`.git` is a directory) and submodules/worktrees (`.git` is a file with `gitdir:` pointer). - * Returns `null` if no valid git info directory can be resolved. 
- */ -export function resolveGitInfoDir(projectDir: string): string | null { - const dotGitPath = path.join(projectDir, '.git') - - if (!fs.existsSync(dotGitPath)) return null - - const stat = fs.lstatSync(dotGitPath) - - if (stat.isDirectory()) { - const infoDir = path.join(dotGitPath, 'info') - return infoDir - } - - if (stat.isFile()) { - try { - const content = fs.readFileSync(dotGitPath, 'utf8').trim() - const match = /^gitdir: (.+)$/.exec(content) - if (match?.[1] != null) { - const gitdir = path.resolve(projectDir, match[1]) - return path.join(gitdir, 'info') - } - } - catch { /* ignore read errors */ } - } - - return null -} - -/** - * Recursively discovers all `.git` entries (directories or files) under a given root, - * skipping common non-source directories. - * Returns absolute paths of directories containing a `.git` entry. - */ -export function findAllGitRepos(rootDir: string, maxDepth = 5): string[] { - const results: string[] = [] - const SKIP_DIRS = new Set(['node_modules', '.turbo', 'dist', 'build', 'out', '.cache']) - - function walk(dir: string, depth: number): void { - if (depth > maxDepth) return - - let entries: fs.Dirent[] - try { - const raw = fs.readdirSync(dir, {withFileTypes: true}) - if (!Array.isArray(raw)) return - entries = raw - } - catch { return } - - const hasGit = entries.some(e => e.name === '.git') - if (hasGit && dir !== rootDir) results.push(dir) - - for (const entry of entries) { - if (!entry.isDirectory()) continue - if (entry.name === '.git' || SKIP_DIRS.has(entry.name)) continue - walk(path.join(dir, entry.name), depth + 1) - } - } - - walk(rootDir, 0) - return results -} - -/** - * Scans `.git/modules/` directory recursively to find all submodule `info/` dirs. - * Handles nested submodules (modules within modules). - * Returns absolute paths of `info/` directories. 
- */ -export function findGitModuleInfoDirs(dotGitDir: string): string[] { - const modulesDir = path.join(dotGitDir, 'modules') - if (!fs.existsSync(modulesDir)) return [] - - const results: string[] = [] - - function walk(dir: string): void { - let entries: fs.Dirent[] - try { - const raw = fs.readdirSync(dir, {withFileTypes: true}) - if (!Array.isArray(raw)) return - entries = raw - } - catch { return } - - const hasInfo = entries.some(e => e.name === 'info' && e.isDirectory()) - if (hasInfo) results.push(path.join(dir, 'info')) - - const nestedModules = entries.find(e => e.name === 'modules' && e.isDirectory()) - if (nestedModules == null) return - - let subEntries: fs.Dirent[] - try { - const raw = fs.readdirSync(path.join(dir, 'modules'), {withFileTypes: true}) - if (!Array.isArray(raw)) return - subEntries = raw - } - catch { return } - for (const sub of subEntries) { - if (sub.isDirectory()) walk(path.join(dir, 'modules', sub.name)) - } - } - - let topEntries: fs.Dirent[] - try { - const raw = fs.readdirSync(modulesDir, {withFileTypes: true}) - if (!Array.isArray(raw)) return results - topEntries = raw - } - catch { return results } - - for (const entry of topEntries) { - if (entry.isDirectory()) walk(path.join(modulesDir, entry.name)) - } - - return results -} diff --git a/cli/src/plugins/plugin-output-shared/utils/index.ts b/cli/src/plugins/plugin-output-shared/utils/index.ts deleted file mode 100644 index ba7639ed..00000000 --- a/cli/src/plugins/plugin-output-shared/utils/index.ts +++ /dev/null @@ -1,24 +0,0 @@ -export { - filterByProjectConfig, - filterCommandsByProjectConfig, - type FilterConfigPath, - filterRulesByProjectConfig, - filterSkillsByProjectConfig, - filterSubAgentsByProjectConfig, - type SeriesFilterable -} from './filters' -export { - findAllGitRepos, - findGitModuleInfoDirs, - resolveGitInfoDir -} from './gitUtils' -export { - applySubSeriesGlobPrefix, - getGlobalRules, - getProjectRules -} from './ruleFilter' -export { - matchesSeries, - 
resolveEffectiveIncludeSeries, - resolveSubSeries -} from './seriesFilter' diff --git a/cli/src/plugins/plugin-output-shared/utils/ruleFilter.ts b/cli/src/plugins/plugin-output-shared/utils/ruleFilter.ts deleted file mode 100644 index 259f2109..00000000 --- a/cli/src/plugins/plugin-output-shared/utils/ruleFilter.ts +++ /dev/null @@ -1,98 +0,0 @@ -import type {RulePrompt} from '../../plugin-shared' -import type {Project, ProjectConfig} from '../../plugin-shared/types' -import {filterByProjectConfig} from './filters' -import {resolveSubSeries} from './seriesFilter' - -export function normalizeSubdirPath(subdir: string): string { - let normalized = subdir.replaceAll(/\.\/+/g, '') - normalized = normalized.replaceAll(/\/+$/g, '') - return normalized -} - -function smartConcatGlob(prefix: string, glob: string): string { - if (glob.startsWith('**/')) return `${prefix}/${glob}` - if (glob.startsWith('*')) return `${prefix}/**/${glob}` - return `${prefix}/${glob}` -} - -function extractPrefixAndBaseGlob(glob: string, prefixes: readonly string[]): {prefix: string | null, baseGlob: string} { - for (const prefix of prefixes) { - const normalizedPrefix = prefix.replaceAll(/\/+$/g, '') - const patterns = [ - {prefix: normalizedPrefix, pattern: `${normalizedPrefix}/`}, - {prefix: normalizedPrefix, pattern: `${normalizedPrefix}\\`} - ] - for (const {prefix: p, pattern} of patterns) { - if (glob.startsWith(pattern)) return {prefix: p, baseGlob: glob.slice(pattern.length)} - } - if (glob === normalizedPrefix) return {prefix: normalizedPrefix, baseGlob: '**/*'} - } - return {prefix: null, baseGlob: glob} -} - -export function applySubSeriesGlobPrefix( - rules: readonly RulePrompt[], - projectConfig: ProjectConfig | undefined -): readonly RulePrompt[] { - const subSeries = resolveSubSeries(projectConfig?.subSeries, projectConfig?.rules?.subSeries) - if (Object.keys(subSeries).length === 0) return rules - - const normalizedSubSeries: Record = {} - for (const [subdir, seriNames] of 
Object.entries(subSeries)) { - const normalizedSubdir = normalizeSubdirPath(subdir) - normalizedSubSeries[normalizedSubdir] = seriNames - } - - const allPrefixes = Object.keys(normalizedSubSeries) - - return rules.map(rule => { - if (rule.seriName == null) return rule - - const matchedPrefixes: string[] = [] - for (const [subdir, seriNames] of Object.entries(normalizedSubSeries)) { - const matched = Array.isArray(rule.seriName) - ? rule.seriName.some(name => seriNames.includes(name)) - : seriNames.includes(rule.seriName) - if (matched) matchedPrefixes.push(subdir) - } - - if (matchedPrefixes.length === 0) return rule - - const newGlobs: string[] = [] - for (const originalGlob of rule.globs) { - const {prefix: existingPrefix, baseGlob} = extractPrefixAndBaseGlob(originalGlob, allPrefixes) - - if (existingPrefix != null) newGlobs.push(originalGlob) - - for (const prefix of matchedPrefixes) { - if (prefix === existingPrefix) continue - const newGlob = smartConcatGlob(prefix, baseGlob) - if (!newGlobs.includes(newGlob)) newGlobs.push(newGlob) - } - } - - return { - ...rule, - globs: newGlobs - } - }) -} - -function normalizeRuleScope(rule: RulePrompt): string { - return rule.scope ?? 'project' -} - -/** - * Returns project-scoped rules for a given project, with sub-series glob prefix applied. - */ -export function getProjectRules(rules: readonly RulePrompt[], project: Project): readonly RulePrompt[] { - const projectRules = rules.filter(r => normalizeRuleScope(r) === 'project') - return applySubSeriesGlobPrefix(filterByProjectConfig(projectRules, project.projectConfig, 'rules'), project.projectConfig) -} - -/** - * Returns global-scoped rules from the given rule list. 
- */ -export function getGlobalRules(rules: readonly RulePrompt[]): readonly RulePrompt[] { - return rules.filter(r => normalizeRuleScope(r) === 'global') -} diff --git a/cli/src/plugins/plugin-output-shared/utils/seriesFilter.ts b/cli/src/plugins/plugin-output-shared/utils/seriesFilter.ts deleted file mode 100644 index 5aec1d04..00000000 --- a/cli/src/plugins/plugin-output-shared/utils/seriesFilter.ts +++ /dev/null @@ -1,95 +0,0 @@ -/** Core series filtering helpers. Delegates to the unified CLI Rust NAPI when available, falls back to pure-TS implementations otherwise. */ -import {createRequire} from 'node:module' -import process from 'node:process' - -function resolveEffectiveIncludeSeriesTS(topLevel?: readonly string[], typeSpecific?: readonly string[]): string[] { - if (topLevel == null && typeSpecific == null) return [] - return [...new Set([...topLevel ?? [], ...typeSpecific ?? []])] -} - -function matchesSeriesTS(seriName: string | readonly string[] | null | undefined, effectiveIncludeSeries: readonly string[]): boolean { - if (seriName == null) return true - if (effectiveIncludeSeries.length === 0) return true - if (typeof seriName === 'string') return effectiveIncludeSeries.includes(seriName) - return seriName.some(name => effectiveIncludeSeries.includes(name)) -} - -function resolveSubSeriesTS( - topLevel?: Readonly>, - typeSpecific?: Readonly> -): Record { - if (topLevel == null && typeSpecific == null) return {} - const merged: Record = {} - for (const [key, values] of Object.entries(topLevel ?? {})) merged[key] = [...values] - for (const [key, values] of Object.entries(typeSpecific ?? {})) { - merged[key] = Object.hasOwn(merged, key) ? 
[...new Set([...merged[key]!, ...values])] : [...values] - } - return merged -} - -interface SeriesFilterFns { - resolveEffectiveIncludeSeries: typeof resolveEffectiveIncludeSeriesTS - matchesSeries: typeof matchesSeriesTS - resolveSubSeries: typeof resolveSubSeriesTS -} - -function isSeriesFilterFns(candidate: unknown): candidate is SeriesFilterFns { - if (candidate == null || typeof candidate !== 'object') return false - const c = candidate as Record - return typeof c['matchesSeries'] === 'function' - && typeof c['resolveEffectiveIncludeSeries'] === 'function' - && typeof c['resolveSubSeries'] === 'function' -} - -function tryLoadNapi(): SeriesFilterFns | undefined { - const suffixMap: Record = { - 'win32-x64': 'win32-x64-msvc', - 'linux-x64': 'linux-x64-gnu', - 'linux-arm64': 'linux-arm64-gnu', - 'darwin-arm64': 'darwin-arm64', - 'darwin-x64': 'darwin-x64' - } - const suffix = suffixMap[`${process.platform}-${process.arch}`] - if (suffix == null) return void 0 - - const packageName = `@truenine/memory-sync-cli-${suffix}` - const binaryFile = `napi-memory-sync-cli.${suffix}.node` - - try { - const _require = createRequire(import.meta.url) - const candidates = [ - packageName, - `${packageName}/${binaryFile}`, - `./${binaryFile}` - ] - - for (const specifier of candidates) { - try { - const loaded = _require(specifier) as unknown - const possible = [loaded, (loaded as {default?: unknown})?.default, (loaded as {config?: unknown})?.config] - for (const candidate of possible) { - if (isSeriesFilterFns(candidate)) return candidate - } - } - catch {} - } - } - catch { /* NAPI unavailable — pure-TS fallback will be used */ } - return void 0 -} - -const { - resolveEffectiveIncludeSeries, - matchesSeries, - resolveSubSeries -}: SeriesFilterFns = tryLoadNapi() ?? 
{ - resolveEffectiveIncludeSeries: resolveEffectiveIncludeSeriesTS, - matchesSeries: matchesSeriesTS, - resolveSubSeries: resolveSubSeriesTS -} - -export { - matchesSeries, - resolveEffectiveIncludeSeries, - resolveSubSeries -} diff --git a/cli/src/plugins/plugin-qoder-ide/index.ts b/cli/src/plugins/plugin-qoder-ide.ts similarity index 100% rename from cli/src/plugins/plugin-qoder-ide/index.ts rename to cli/src/plugins/plugin-qoder-ide.ts diff --git a/cli/src/plugins/plugin-qoder-ide/QoderIDEPluginOutputPlugin.ts b/cli/src/plugins/plugin-qoder-ide/QoderIDEPluginOutputPlugin.ts deleted file mode 100644 index 3d05b5fc..00000000 --- a/cli/src/plugins/plugin-qoder-ide/QoderIDEPluginOutputPlugin.ts +++ /dev/null @@ -1,426 +0,0 @@ -import type { - CommandPrompt, - OutputPluginContext, - OutputWriteContext, - ProjectChildrenMemoryPrompt, - RulePrompt, - RuleScope, - SkillPrompt, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import {Buffer} from 'node:buffer' -import * as path from 'node:path' -import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' -import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterCommandsByProjectConfig, filterRulesByProjectConfig, filterSkillsByProjectConfig} from '@truenine/plugin-output-shared' - -const QODER_CONFIG_DIR = '.qoder' -const RULES_SUBDIR = 'rules' -const COMMANDS_SUBDIR = 'commands' -const SKILLS_SUBDIR = 'skills' -const GLOBAL_RULE_FILE = 'global.md' -const PROJECT_RULE_FILE = 'always.md' -const CHILD_RULE_FILE_PREFIX = 'glob-' -const SKILL_FILE_NAME = 'SKILL.md' -const MCP_CONFIG_FILE = 'mcp.json' -const TRIGGER_ALWAYS = 'always_on' -const TRIGGER_GLOB = 'glob' -const RULE_GLOB_KEY = 'glob' -const RULE_FILE_PREFIX = 'rule-' - -export class QoderIDEPluginOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('QoderIDEPluginOutputPlugin', {globalConfigDir: QODER_CONFIG_DIR, indexignore: '.qoderignore'}) - } - - async 
registerProjectOutputDirs(ctx: OutputPluginContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - return projects - .filter(p => p.dirFromWorkspacePath != null) - .map(p => this.createProjectRulesDirPath(p.dirFromWorkspacePath!)) - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {workspace, rules} = ctx.collectedInputContext - const {projects} = workspace - const {globalMemory} = ctx.collectedInputContext - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - if (globalMemory != null) results.push(this.createProjectRuleFilePath(projectDir, GLOBAL_RULE_FILE)) - - if (project.rootMemoryPrompt != null) results.push(this.createProjectRuleFilePath(projectDir, PROJECT_RULE_FILE)) - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) results.push(this.createProjectRuleFilePath(projectDir, this.buildChildRuleFileName(child))) - } - - if (rules != null && rules.length > 0) { // Handle project rules - const projectRules = applySubSeriesGlobPrefix( - filterRulesByProjectConfig( - rules.filter(r => this.normalizeRuleScope(r) === 'project'), - project.projectConfig - ), - project.projectConfig - ) - for (const rule of projectRules) { - const fileName = this.buildRuleFileName(rule) - results.push(this.createProjectRuleFilePath(projectDir, fileName)) - } - } - } - results.push(...this.registerProjectIgnoreOutputFiles(projects)) - return results - } - - async registerGlobalOutputDirs(ctx: OutputPluginContext): Promise { - const globalDir = this.getGlobalConfigDir() - const {commands, skills, rules} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const results: RelativePath[] = [] - - if (commands != null && commands.length > 0) { - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - if 
(filteredCommands.length > 0) results.push(this.createRelativePath(COMMANDS_SUBDIR, globalDir, () => COMMANDS_SUBDIR)) - } - - if (skills != null && skills.length > 0) { - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter.name - results.push(this.createRelativePath( - path.join(SKILLS_SUBDIR, skillName), - globalDir, - () => skillName - )) - } - } - - const globalRules = rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules != null && globalRules.length > 0) { - results.push(this.createRelativePath( - path.join(RULES_SUBDIR), - globalDir, - () => RULES_SUBDIR - )) - } - return results - } - - async registerGlobalOutputFiles(ctx: OutputPluginContext): Promise { - const globalDir = this.getGlobalConfigDir() - const {commands, skills, rules} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const results: RelativePath[] = [] - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - - if (commands != null && commands.length > 0) { - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - for (const cmd of filteredCommands) { - const fileName = this.transformCommandName(cmd, transformOptions) - results.push(this.createRelativePath( - path.join(COMMANDS_SUBDIR, fileName), - globalDir, - () => COMMANDS_SUBDIR - )) - } - } - - const globalRules = rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules != null && globalRules.length > 0) { - for (const rule of globalRules) { - const fileName = this.buildRuleFileName(rule) - results.push(this.createRelativePath( - path.join(RULES_SUBDIR, fileName), - globalDir, - () => RULES_SUBDIR - )) - } - } - - const filteredSkills = skills != null ? 
filterSkillsByProjectConfig(skills, projectConfig) : [] - if (filteredSkills.length > 0) { - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter.name - results.push(this.createRelativePath( - path.join(SKILLS_SUBDIR, skillName, SKILL_FILE_NAME), - globalDir, - () => skillName - )) - - if (skill.mcpConfig != null) { - results.push(this.createRelativePath( - path.join(SKILLS_SUBDIR, skillName, MCP_CONFIG_FILE), - globalDir, - () => skillName - )) - } - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - results.push(this.createRelativePath( - path.join(SKILLS_SUBDIR, skillName, childDoc.relativePath.replace(/\.mdx$/, '.md')), - globalDir, - () => skillName - )) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - results.push(this.createRelativePath( - path.join(SKILLS_SUBDIR, skillName, resource.relativePath), - globalDir, - () => skillName - )) - } - } - } - } - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {workspace, globalMemory, commands, skills, rules, aiAgentIgnoreConfigFiles} = ctx.collectedInputContext - const hasProjectPrompts = workspace.projects.some( - p => p.rootMemoryPrompt != null || (p.childMemoryPrompts?.length ?? 0) > 0 - ) - const hasRules = (rules?.length ?? 0) > 0 - const hasQoderIgnore = aiAgentIgnoreConfigFiles?.some(f => f.fileName === '.qoderignore') ?? false - if (hasProjectPrompts || globalMemory != null || (commands?.length ?? 0) > 0 || (skills?.length ?? 
0) > 0 || hasRules || hasQoderIgnore) return true - this.log.trace({action: 'skip', reason: 'noOutputs'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const {workspace, globalMemory, rules} = ctx.collectedInputContext - const {projects} = workspace - const fileResults: WriteResult[] = [] - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - const projectDir = project.dirFromWorkspacePath - - if (globalMemory != null) { - const content = this.buildAlwaysRuleContent(globalMemory.content as string) - fileResults.push(await this.writeProjectRuleFile(ctx, projectDir, GLOBAL_RULE_FILE, content, 'globalRule')) - } - - if (project.rootMemoryPrompt != null) { - const content = this.buildAlwaysRuleContent(project.rootMemoryPrompt.content as string) - fileResults.push(await this.writeProjectRuleFile(ctx, projectDir, PROJECT_RULE_FILE, content, 'projectRootRule')) - } - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - const fileName = this.buildChildRuleFileName(child) - const content = this.buildGlobRuleContent(child) - fileResults.push(await this.writeProjectRuleFile(ctx, projectDir, fileName, content, 'projectChildRule')) - } - } - - if (rules != null && rules.length > 0) { // Write project rules - const projectRules = applySubSeriesGlobPrefix( - filterRulesByProjectConfig( - rules.filter(r => this.normalizeRuleScope(r) === 'project'), - project.projectConfig - ), - project.projectConfig - ) - for (const rule of projectRules) { - const fileName = this.buildRuleFileName(rule) - const content = this.buildRuleContent(rule) - fileResults.push(await this.writeProjectRuleFile(ctx, projectDir, fileName, content, 'projectRule')) - } - } - } - const ignoreResults = await this.writeProjectIgnoreFiles(ctx) - fileResults.push(...ignoreResults) - return {files: fileResults, dirs: []} - } - - async writeGlobalOutputs(ctx: OutputWriteContext): Promise { - const 
{commands, skills, rules} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const fileResults: WriteResult[] = [] - const globalDir = this.getGlobalConfigDir() - const commandsDir = path.join(globalDir, COMMANDS_SUBDIR) - const skillsDir = path.join(globalDir, SKILLS_SUBDIR) - const rulesDir = path.join(globalDir, RULES_SUBDIR) - - if (commands != null && commands.length > 0) { - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - for (const cmd of filteredCommands) fileResults.push(await this.writeGlobalCommand(ctx, commandsDir, cmd)) - } - - if (rules != null && rules.length > 0) { - const globalRules = rules.filter(r => this.normalizeRuleScope(r) === 'global') - for (const rule of globalRules) fileResults.push(await this.writeRuleFile(ctx, rulesDir, rule)) - } - - if (skills == null || skills.length === 0) return {files: fileResults, dirs: []} - - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - for (const skill of filteredSkills) fileResults.push(...await this.writeGlobalSkill(ctx, skillsDir, skill)) - return {files: fileResults, dirs: []} - } - - private createProjectRulesDirPath(projectDir: RelativePath): RelativePath { - return this.createRelativePath( - path.join(projectDir.path, QODER_CONFIG_DIR, RULES_SUBDIR), - projectDir.basePath, - () => RULES_SUBDIR - ) - } - - private createProjectRuleFilePath(projectDir: RelativePath, fileName: string): RelativePath { - return this.createRelativePath( - path.join(projectDir.path, QODER_CONFIG_DIR, RULES_SUBDIR, fileName), - projectDir.basePath, - () => RULES_SUBDIR - ) - } - - private buildChildRuleFileName(child: ProjectChildrenMemoryPrompt): string { - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '').replaceAll('/', '-') - return `${CHILD_RULE_FILE_PREFIX}${normalized.length > 0 ? 
normalized : 'root'}.md` - } - - private buildAlwaysRuleContent(content: string): string { - return buildMarkdownWithFrontMatter({trigger: TRIGGER_ALWAYS, type: 'user_command'}, content) - } - - private buildGlobRuleContent(child: ProjectChildrenMemoryPrompt): string { - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '') - const pattern = normalized.length === 0 ? '**/*' : `${normalized}/**` - return buildMarkdownWithFrontMatter({trigger: TRIGGER_GLOB, [RULE_GLOB_KEY]: pattern, type: 'user_command'}, child.content as string) - } - - private async writeProjectRuleFile( - ctx: OutputWriteContext, - projectDir: RelativePath, - fileName: string, - content: string, - label: string - ): Promise { - const rulesDir = path.join(projectDir.basePath, projectDir.path, QODER_CONFIG_DIR, RULES_SUBDIR) - const fullPath = path.join(rulesDir, fileName) - return this.writeFile(ctx, fullPath, content, label) - } - - private async writeGlobalCommand( - ctx: OutputWriteContext, - commandsDir: string, - cmd: CommandPrompt - ): Promise { - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - const fileName = this.transformCommandName(cmd, transformOptions) - const fullPath = path.join(commandsDir, fileName) - const fmData = this.buildCommandFrontMatter(cmd) - const content = buildMarkdownWithFrontMatter(fmData, cmd.content) - return this.writeFile(ctx, fullPath, content, 'globalFastCommand') - } - - private async writeRuleFile( - ctx: OutputWriteContext, - rulesDir: string, - rule: RulePrompt - ): Promise { - const fileName = this.buildRuleFileName(rule) - const fullPath = path.join(rulesDir, fileName) - const content = this.buildRuleContent(rule) - return this.writeFile(ctx, fullPath, content, 'rule') - } - - private async writeGlobalSkill( - ctx: OutputWriteContext, - skillsDir: string, - skill: SkillPrompt - ): Promise { - const results: 
WriteResult[] = [] - const skillName = skill.yamlFrontMatter.name - const skillDir = path.join(skillsDir, skillName) - const skillFilePath = path.join(skillDir, SKILL_FILE_NAME) - - const fmData = this.buildSkillFrontMatter(skill) - const content = buildMarkdownWithFrontMatter(fmData, skill.content as string) - results.push(await this.writeFile(ctx, skillFilePath, content, 'skill')) - - if (skill.mcpConfig != null) { - const mcpPath = path.join(skillDir, MCP_CONFIG_FILE) - results.push(await this.writeFile(ctx, mcpPath, skill.mcpConfig.rawContent, 'mcpConfig')) - } - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - const childPath = path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')) - results.push(await this.writeFile(ctx, childPath, childDoc.content as string, 'childDoc')) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - const resourcePath = path.join(skillDir, resource.relativePath) - if (resource.encoding === 'base64') { - const buffer = Buffer.from(resource.content, 'base64') - const dir = path.dirname(resourcePath) - this.ensureDirectory(dir) - this.writeFileSyncBuffer(resourcePath, buffer) - results.push({ - path: this.createRelativePath(resource.relativePath, skillDir, () => skillName), - success: true - }) - } else results.push(await this.writeFile(ctx, resourcePath, resource.content, 'resource')) - } - } - return results - } - - protected override buildSkillFrontMatter(skill: SkillPrompt): Record { - const fm = skill.yamlFrontMatter - return { - name: fm.name, - description: fm.description, - type: 'user_command', - ...fm.displayName != null && {displayName: fm.displayName}, - ...fm.keywords != null && fm.keywords.length > 0 && {keywords: fm.keywords}, - ...fm.author != null && {author: fm.author}, - ...fm.version != null && {version: fm.version}, - ...fm.allowTools != null && fm.allowTools.length > 0 && {allowTools: fm.allowTools} - } - } - - private 
buildCommandFrontMatter(cmd: CommandPrompt): Record { - const fm = cmd.yamlFrontMatter - if (fm == null) return {description: 'Fast command', type: 'user_command'} - return { - description: fm.description, - type: 'user_command', - ...fm.argumentHint != null && {argumentHint: fm.argumentHint}, - ...fm.allowTools != null && fm.allowTools.length > 0 && {allowTools: fm.allowTools} - } - } - - protected override buildRuleFileName(rule: RulePrompt, prefix: string = RULE_FILE_PREFIX): string { - return `${prefix}${rule.series}-${rule.ruleName}.md` - } - - protected override buildRuleContent(rule: RulePrompt): string { - const fmData: Record = { - trigger: TRIGGER_GLOB, - [RULE_GLOB_KEY]: rule.globs.length > 0 ? rule.globs.join(', ') : '**/*', - type: 'user_command' - } - return buildMarkdownWithFrontMatter(fmData, rule.content) - } - - protected override normalizeRuleScope(rule: RulePrompt): RuleScope { - return rule.scope ?? 'global' - } -} diff --git a/cli/src/plugins/plugin-readme/index.ts b/cli/src/plugins/plugin-readme.ts similarity index 100% rename from cli/src/plugins/plugin-readme/index.ts rename to cli/src/plugins/plugin-readme.ts diff --git a/cli/src/plugins/plugin-readme/ReadmeMdConfigFileOutputPlugin.ts b/cli/src/plugins/plugin-readme/ReadmeMdConfigFileOutputPlugin.ts deleted file mode 100644 index 8458e381..00000000 --- a/cli/src/plugins/plugin-readme/ReadmeMdConfigFileOutputPlugin.ts +++ /dev/null @@ -1,128 +0,0 @@ -import type { - OutputPluginContext, - OutputWriteContext, - ReadmeFileKind, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' - -import * as fs from 'node:fs' -import * as path from 'node:path' -import {AbstractOutputPlugin} from '@truenine/plugin-output-shared' -import {FilePathKind, README_FILE_KIND_MAP} from '../plugin-shared' - -function resolveOutputFileName(fileKind?: ReadmeFileKind): string { - return README_FILE_KIND_MAP[fileKind ?? 
'Readme'].out -} - -/** - * Output plugin for writing readme-family files to project directories. - * Reads README prompts collected by ReadmeMdInputPlugin and writes them - * to the corresponding project directories. - * - * Output mapping: - * - fileKind=Readme → README.md - * - fileKind=CodeOfConduct → CODE_OF_CONDUCT.md - * - fileKind=Security → SECURITY.md - * - * Supports: - * - Root files (written to project root) - * - Child files (written to project subdirectories) - * - Dry-run mode (preview without writing) - * - Clean operation (delete generated files) - */ -export class ReadmeMdConfigFileOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('ReadmeMdConfigFileOutputPlugin', {outputFileName: 'README.md'}) - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {readmePrompts} = ctx.collectedInputContext - - if (readmePrompts == null || readmePrompts.length === 0) return results - - for (const readme of readmePrompts) { - const {targetDir} = readme - const outputFileName = resolveOutputFileName(readme.fileKind) - const filePath = path.join(targetDir.path, outputFileName) - - results.push({ - pathKind: FilePathKind.Relative, - path: filePath, - basePath: targetDir.basePath, - getDirectoryName: () => targetDir.getDirectoryName(), - getAbsolutePath: () => path.join(targetDir.basePath, filePath) - }) - } - - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {readmePrompts} = ctx.collectedInputContext - - if (readmePrompts?.length !== 0) return true - - this.log.debug('skipped', {reason: 'no README prompts to write'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - const {readmePrompts} = ctx.collectedInputContext - - if (readmePrompts == null || readmePrompts.length === 0) return {files: fileResults, dirs: dirResults} - - for 
(const readme of readmePrompts) { - const result = await this.writeReadmeFile(ctx, readme) - fileResults.push(result) - } - - return {files: fileResults, dirs: dirResults} - } - - private async writeReadmeFile( - ctx: OutputWriteContext, - readme: {projectName: string, targetDir: RelativePath, content: unknown, isRoot: boolean, fileKind?: ReadmeFileKind} - ): Promise { - const {targetDir} = readme - const outputFileName = resolveOutputFileName(readme.fileKind) - const filePath = path.join(targetDir.path, outputFileName) - const fullPath = path.join(targetDir.basePath, filePath) - const content = readme.content as string - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: filePath, - basePath: targetDir.basePath, - getDirectoryName: () => targetDir.getDirectoryName(), - getAbsolutePath: () => fullPath - } - - const label = readme.isRoot - ? `project:${readme.projectName}/${outputFileName}` - : `project:${readme.projectName}/${targetDir.path}/${outputFileName}` - - if (ctx.dryRun === true) { // Dry-run mode: log without writing - this.log.trace({action: 'dryRun', type: 'readme', path: fullPath, label}) - return {path: relativePath, success: true, skipped: false} - } - - try { // Actual write operation - const dir = path.dirname(fullPath) // Ensure target directory exists - if (!fs.existsSync(dir)) fs.mkdirSync(dir, {recursive: true}) - - fs.writeFileSync(fullPath, content, 'utf8') - this.log.trace({action: 'write', type: 'readme', path: fullPath, label}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'readme', path: fullPath, label, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } -} diff --git a/cli/src/plugins/plugin-shared/PluginNames.ts b/cli/src/plugins/plugin-shared/PluginNames.ts deleted file mode 100644 index cdb8d302..00000000 --- a/cli/src/plugins/plugin-shared/PluginNames.ts +++ /dev/null @@ -1,25 +0,0 @@ -export const PLUGIN_NAMES = { - AgentsOutput: 'AgentsOutputPlugin', - GeminiCLIOutput: 'GeminiCLIOutputPlugin', - CursorOutput: 'CursorOutputPlugin', - WindsurfOutput: 'WindsurfOutputPlugin', - ClaudeCodeCLIOutput: 'ClaudeCodeCLIOutputPlugin', - KiroIDEOutput: 'KiroCLIOutputPlugin', - OpencodeCLIOutput: 'OpencodeCLIOutputPlugin', - OpenAICodexCLIOutput: 'CodexCLIOutputPlugin', - DroidCLIOutput: 'DroidCLIOutputPlugin', - WarpIDEOutput: 'WarpIDEOutputPlugin', - TraeIDEOutput: 'TraeIDEOutputPlugin', - TraeCNIDEOutput: 'TraeCNIDEOutputPlugin', - QoderIDEOutput: 'QoderIDEPluginOutputPlugin', - JetBrainsCodeStyleOutput: 'JetBrainsIDECodeStyleConfigOutputPlugin', - JetBrainsAICodexOutput: 'JetBrainsAIAssistantCodexOutputPlugin', - AgentSkillsCompactOutput: 'GenericSkillsOutputPlugin', - GitExcludeOutput: 'GitExcludeOutputPlugin', - ReadmeOutput: 'ReadmeMdConfigFileOutputPlugin', - VSCodeOutput: 'VisualStudioCodeIDEConfigOutputPlugin', - EditorConfigOutput: 'EditorConfigOutputPlugin', - AntigravityOutput: 'AntigravityOutputPlugin' -} as const - -export type PluginName = (typeof PLUGIN_NAMES)[keyof typeof PLUGIN_NAMES] diff --git a/cli/src/plugins/plugin-shared/constants.ts b/cli/src/plugins/plugin-shared/constants.ts deleted file mode 100644 index fdd1df7c..00000000 --- a/cli/src/plugins/plugin-shared/constants.ts +++ /dev/null @@ -1,9 +0,0 @@ -import type {UserConfigFile} from './types/ConfigTypes.schema' - -export const PathPlaceholders = { - USER_HOME: '~', - WORKSPACE: '$WORKSPACE' -} as const - -type DefaultUserConfig = Readonly>> -export 
const DEFAULT_USER_CONFIG = {} as DefaultUserConfig diff --git a/cli/src/plugins/plugin-shared/index.ts b/cli/src/plugins/plugin-shared/index.ts deleted file mode 100644 index 741ae66c..00000000 --- a/cli/src/plugins/plugin-shared/index.ts +++ /dev/null @@ -1,28 +0,0 @@ -export { - AbstractPlugin -} from './AbstractPlugin' -export { - DEFAULT_USER_CONFIG, - PathPlaceholders -} from './constants' -export { - createLogger, - getGlobalLogLevel, - setGlobalLogLevel -} from './log' -export type { - ILogger, - LogLevel -} from './log' -export { - PLUGIN_NAMES -} from './PluginNames' -export type { - PluginName -} from './PluginNames' -export { - collectFileNames, - createMockProject, - createMockRulePrompt -} from './testing' -export * from './types' diff --git a/cli/src/plugins/plugin-shared/log.ts b/cli/src/plugins/plugin-shared/log.ts deleted file mode 100644 index 39aa1709..00000000 --- a/cli/src/plugins/plugin-shared/log.ts +++ /dev/null @@ -1,9 +0,0 @@ -export { - createLogger, - getGlobalLogLevel, - setGlobalLogLevel -} from '@truenine/logger' -export type { - ILogger, - LogLevel -} from '@truenine/logger' diff --git a/cli/src/plugins/plugin-shared/testing/index.ts b/cli/src/plugins/plugin-shared/testing/index.ts deleted file mode 100644 index d7887558..00000000 --- a/cli/src/plugins/plugin-shared/testing/index.ts +++ /dev/null @@ -1,65 +0,0 @@ -import type {RelativePath} from '../types/FileSystemTypes' -import type {Project, RulePrompt} from '../types/InputTypes' -import {FilePathKind, NamingCaseKind, PromptKind} from '../types/Enums' - -export function createMockRulePrompt( - series: string, - ruleName: string, - seriName: string | undefined, - scope: 'global' | 'project' = 'project' -): RulePrompt { - const content = '# Rule body' - const base = { - type: PromptKind.Rule, - content, - length: content.length, - filePathKind: FilePathKind.Relative, - dir: { - pathKind: FilePathKind.Relative, - path: '.', - basePath: '', - getDirectoryName: () => '.', - 
getAbsolutePath: () => '.' - }, - markdownContents: [], - yamlFrontMatter: { - description: 'Test rule', - globs: ['**/*.ts'], - namingCase: NamingCaseKind.KebabCase - }, - series, - ruleName, - globs: ['**/*.ts'], - scope - } - - return seriName != null - ? {...base, seriName} as RulePrompt - : base as RulePrompt -} - -export function createMockProject( - name: string, - basePath: string, - projectPath: string, - projectConfig?: unknown -): Project { - return { - name, - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: projectPath, - basePath, - getDirectoryName: () => name, - getAbsolutePath: () => `${basePath}/${projectPath}` - }, - ...projectConfig != null && {projectConfig: projectConfig as never} - } -} - -export function collectFileNames(results: RelativePath[]): string[] { - return results.map(r => { - const parts = r.path.split(/[/\\]/) - return parts.at(-1) ?? r.path - }) -} diff --git a/cli/src/plugins/plugin-shared/types/ConfigTypes.schema.ts b/cli/src/plugins/plugin-shared/types/ConfigTypes.schema.ts deleted file mode 100644 index ec1bfe6c..00000000 --- a/cli/src/plugins/plugin-shared/types/ConfigTypes.schema.ts +++ /dev/null @@ -1,174 +0,0 @@ -import {z} from 'zod/v3' - -/** - * Zod schema for a source/dist path pair. - * Both paths are relative to the shadow source project root. - */ -export const ZShadowSourceProjectDirPair = z.object({ - /** Source path (human-authored .cn.mdx files) */ - src: z.string(), - /** Output/compiled path (read by input plugins) */ - dist: z.string() -}) - -/** - * Zod schema for the shadow source project configuration. - * All paths are relative to `/`. - * @deprecated Use ZAindexConfig instead. 
- */ -export const ZShadowSourceProjectConfig = z.object({ - dir: z.string().default('aindex'), - skills: ZShadowSourceProjectDirPair, - commands: ZShadowSourceProjectDirPair, - subAgents: ZShadowSourceProjectDirPair, - rules: ZShadowSourceProjectDirPair, - globalPrompt: ZShadowSourceProjectDirPair, - workspacePrompt: ZShadowSourceProjectDirPair, - app: ZShadowSourceProjectDirPair, - ext: ZShadowSourceProjectDirPair, - arch: ZShadowSourceProjectDirPair -}) - -/** - * Zod schema for the aindex configuration. - * This is the user-facing configuration format in ~/.aindex/.tnmsc.json - * All paths are relative to `/`. - */ -export const ZAindexConfig = z.object({ - dir: z.string().default('aindex'), - skills: ZShadowSourceProjectDirPair, - commands: ZShadowSourceProjectDirPair, - subAgents: ZShadowSourceProjectDirPair, - rules: ZShadowSourceProjectDirPair, - globalPrompt: ZShadowSourceProjectDirPair, - workspacePrompt: ZShadowSourceProjectDirPair, - app: ZShadowSourceProjectDirPair, - ext: ZShadowSourceProjectDirPair, - arch: ZShadowSourceProjectDirPair -}) - -/** - * Zod schema for per-plugin command series override options - */ -export const ZCommandSeriesPluginOverride = z.object({ - includeSeriesPrefix: z.boolean().optional(), - seriesSeparator: z.string().optional() -}) - -/** - * Zod schema for command series configuration options - */ -export const ZCommandSeriesOptions = z.object({ - includeSeriesPrefix: z.boolean().optional(), - pluginOverrides: z.record(z.string(), ZCommandSeriesPluginOverride).optional() -}) - -/** - * Zod schema for user profile information - */ -export const ZUserProfile = z.object({ - name: z.string().optional(), - username: z.string().optional(), - gender: z.string().optional(), - birthday: z.string().optional() -}).catchall(z.unknown()) - -/** - * Zod schema for the user configuration file (.tnmsc.json). - * Supports both 'aindex' format and legacy 'shadowSourceProject' format. 
- * Note: Both formats have the same structure, shadowSourceProject is kept for backward compatibility. - */ -export const ZUserConfigFile = z.object({ - version: z.string().optional(), - workspaceDir: z.string().optional(), - /** Aindex configuration */ - aindex: ZAindexConfig.optional(), - /** @deprecated Use aindex instead. Kept for backward compatibility. */ - shadowSourceProject: ZShadowSourceProjectConfig.optional(), - logLevel: z.enum(['trace', 'debug', 'info', 'warn', 'error']).optional(), - commandSeriesOptions: ZCommandSeriesOptions.optional(), - profile: ZUserProfile.optional() -}) - -/** - * Convert UserConfigFile to ensure aindex field is populated. - * If shadowSourceProject is provided but aindex is not, copies shadowSourceProject to aindex. - * @deprecated This function is kept for backward compatibility. - */ -export function convertUserConfigAindexToShadowSourceProject( - config: z.infer -): z.infer { - if (config.aindex != null) { // If aindex is explicitly provided, use it directly - return config - } - - if (config.shadowSourceProject != null) { // If shadowSourceProject is provided but aindex is not, copy it to aindex - return { - ...config, - aindex: config.shadowSourceProject - } - } - - return config // Neither format provided - return as-is -} - -/** - * Zod schema for MCP project config - */ -export const ZMcpProjectConfig = z.object({names: z.array(z.string()).optional()}) - -/** - * Zod schema for per-type series filtering configuration. - * Shared by all four prompt type sections (rules, skills, subAgents, commands). 
- */ -export const ZTypeSeriesConfig = z.object({ - includeSeries: z.array(z.string()).optional(), - subSeries: z.record(z.string(), z.array(z.string())).optional() -}) - -/** - * Zod schema for project config - */ -export const ZProjectConfig = z.object({ - mcp: ZMcpProjectConfig.optional(), - includeSeries: z.array(z.string()).optional(), - subSeries: z.record(z.string(), z.array(z.string())).optional(), - rules: ZTypeSeriesConfig.optional(), - skills: ZTypeSeriesConfig.optional(), - subAgents: ZTypeSeriesConfig.optional(), - commands: ZTypeSeriesConfig.optional() -}) - -/** - * Zod schema for ConfigLoader options - */ -export const ZConfigLoaderOptions = z.object({ - configFileName: z.string().optional(), - searchPaths: z.array(z.string()).optional(), - searchCwd: z.boolean().optional(), - searchGlobal: z.boolean().optional() -}) - -export type AindexDirPair = z.infer -export type AindexConfig = z.infer - -/** @deprecated Use AindexDirPair instead */ -export type ShadowSourceProjectDirPair = AindexDirPair -/** @deprecated Use AindexConfig instead */ -export type ShadowSourceProjectConfig = AindexConfig -export type CommandSeriesPluginOverride = z.infer -export type CommandSeriesOptions = z.infer -export type UserConfigFile = z.infer -export type McpProjectConfig = z.infer -export type TypeSeriesConfig = z.infer -export type ProjectConfig = z.infer -export type ConfigLoaderOptions = z.infer - -/** - * Result of loading a config file - */ -export interface ConfigLoadResult { - readonly config: UserConfigFile - readonly source: string | null - readonly found: boolean -} diff --git a/cli/src/plugins/plugin-shared/types/Enums.ts b/cli/src/plugins/plugin-shared/types/Enums.ts deleted file mode 100644 index c782b9cf..00000000 --- a/cli/src/plugins/plugin-shared/types/Enums.ts +++ /dev/null @@ -1,75 +0,0 @@ -export enum PluginKind { - Input = 'Input', - Output = 'Output' -} - -export enum PromptKind { - GlobalMemory = 'GlobalMemory', - ProjectRootMemory = 
'ProjectRootMemory', - ProjectChildrenMemory = 'ProjectChildrenMemory', - Command = 'Command', - SubAgent = 'SubAgent', - Skill = 'Skill', - SkillChildDoc = 'SkillChildDoc', - SkillResource = 'SkillResource', - SkillMcpConfig = 'SkillMcpConfig', - Readme = 'Readme', - Rule = 'Rule' -} - -/** - * Scope for prompt application (rules, skills, commands, subAgents) - */ -export type RuleScope = 'project' | 'global' | 'workspace' - -export enum ClaudeCodeCLISubAgentColors { - Red = 'Red', - Green = 'Green', - Blue = 'Blue', - Yellow = 'Yellow' -} - -/** - * Tools callable by AI Agent - */ -export enum CodingAgentTools { - Read = 'Read', - Write = 'Write', - Edit = 'Edit', - Grep = 'Grep' -} - -/** - * Naming convention - */ -export enum NamingCaseKind { - CamelCase = 'CamelCase', - PascalCase = 'PascalCase', - SnakeCase = 'SnakeCase', - KebabCase = 'KebabCase', - UpperCase = 'UpperCase', - LowerCase = 'LowerCase', - Original = 'Original' -} - -export enum GlobalConfigDirectoryType { - UserHome = 'UserHome', - External = 'External' -} - -/** - * Directory path kind - */ -export enum FilePathKind { - Relative = 'Relative', - Absolute = 'Absolute', - Root = 'Root' -} - -export enum IDEKind { - VSCode = 'VSCode', - IntellijIDEA = 'IntellijIDEA', - Git = 'Git', - EditorConfig = 'EditorConfig', - Original = 'Original' -} diff --git a/cli/src/plugins/plugin-shared/types/Errors.ts b/cli/src/plugins/plugin-shared/types/Errors.ts deleted file mode 100644 index 1379295d..00000000 --- a/cli/src/plugins/plugin-shared/types/Errors.ts +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Error thrown when a circular dependency is detected in the plugin graph. - */ -export class CircularDependencyError extends Error { - constructor(public readonly cycle: string[]) { - super(`Circular dependency detected: ${cycle.join(' -> ')}`) - this.name = 'CircularDependencyError' - } -} - -/** - * Error thrown when a plugin depends on a non-existent plugin. 
- */ -export class MissingDependencyError extends Error { - constructor( - public readonly pluginName: string, - public readonly missingDependency: string - ) { - super(`Plugin "${pluginName}" depends on non-existent plugin "${missingDependency}"`) - this.name = 'MissingDependencyError' - } -} - -/** - * Configuration validation error - * Error thrown when configuration file contains invalid fields - */ -export class ConfigValidationError extends Error { - constructor( - readonly field: string, - readonly reason: string, - readonly filePath?: string - ) { - const msg = filePath != null && filePath.length > 0 - ? `Invalid configuration field "${field}": ${reason} (file: ${filePath})` - : `Invalid configuration field "${field}": ${reason}` - super(msg) - this.name = 'ConfigValidationError' - } -} diff --git a/cli/src/plugins/plugin-shared/types/FileSystemTypes.ts b/cli/src/plugins/plugin-shared/types/FileSystemTypes.ts deleted file mode 100644 index 8528424e..00000000 --- a/cli/src/plugins/plugin-shared/types/FileSystemTypes.ts +++ /dev/null @@ -1,37 +0,0 @@ -import type {FilePathKind} from './Enums' - -/** - * Common directory representation - */ -export interface Path { - readonly pathKind: K - readonly path: string - readonly getDirectoryName: () => string -} - -/** - * Relative path directory - */ -export interface RelativePath extends Path { - readonly basePath: string - getAbsolutePath: () => string -} - -/** - * Absolute path directory - */ -export type AbsolutePath = Path - -export type RootPath = Path - -export interface FileContent< - C = unknown, - FK extends FilePathKind = FilePathKind.Relative, - F extends Path = RelativePath -> { - content: C - length: number - filePathKind: FK - dir: F - charsetEncoding?: BufferEncoding -} diff --git a/cli/src/plugins/plugin-shared/types/LocalizedTypes.ts b/cli/src/plugins/plugin-shared/types/LocalizedTypes.ts deleted file mode 100644 index f2a670c6..00000000 --- a/cli/src/plugins/plugin-shared/types/LocalizedTypes.ts 
+++ /dev/null @@ -1,224 +0,0 @@ -import type {PromptKind} from './Enums' -import type {Prompt} from './PromptTypes' - -/** - * Supported locale codes - */ -export type Locale = 'zh' | 'en' - -/** - * Localized content wrapper for a single locale - * Contains both compiled content and raw MDX source - */ -export interface LocalizedContent { - /** Compiled/processed content */ - readonly content: string - - /** Original MDX source (before compilation) */ - readonly rawMdx?: string - - /** Extracted front matter */ - readonly frontMatter?: Record - - /** File last modified timestamp */ - readonly lastModified: Date - - /** Full prompt object (optional, for extended access) */ - readonly prompt?: T - - /** Absolute file path */ - readonly filePath: string -} - -/** - * Source content container for all locales - */ -export interface LocalizedSource { - /** Chinese content (.cn.mdx) */ - readonly zh?: LocalizedContent - - /** English content (.mdx) */ - readonly en?: LocalizedContent - - /** Default locale content (typically zh) */ - readonly default: LocalizedContent - - /** Which locale is the default */ - readonly defaultLocale: Locale -} - -/** Universal localized prompt wrapper */ -export interface LocalizedPrompt { - readonly name: string // Prompt identifier name - readonly type: K // Prompt type kind - readonly src: LocalizedSource // Source files content (src directory) - readonly dist?: LocalizedContent // Compiled/dist content (dist directory, optional) - - /** Metadata flags */ - readonly metadata: { - readonly hasDist: boolean // Whether dist content exists - readonly hasMultipleLocales: boolean // Whether multiple locales exist in src - readonly isDirectoryStructure: boolean // Whether this is a directory-based prompt (like skills) - - /** Available child items (for directory structures) */ - readonly children?: string[] - } - - /** File paths for all variants */ - readonly paths: { - readonly zh?: string - readonly en?: string - readonly dist?: string - } 
-} - -/** - * Type aliases for specific prompt types - */ -export type LocalizedSkillPrompt = LocalizedPrompt< - import('./InputTypes').SkillPrompt, - PromptKind.Skill -> - -export type LocalizedCommandPrompt = LocalizedPrompt< - import('./InputTypes').CommandPrompt, - PromptKind.Command -> - -export type LocalizedSubAgentPrompt = LocalizedPrompt< - import('./InputTypes').SubAgentPrompt, - PromptKind.SubAgent -> - -export type LocalizedRulePrompt = LocalizedPrompt< - import('./InputTypes').RulePrompt, - PromptKind.Rule -> - -export type LocalizedReadmePrompt = LocalizedPrompt< - import('./InputTypes').ReadmePrompt, - PromptKind.Readme -> - -/** - * Unified prompts container for CollectedInputContext - * Replaces individual prompt arrays with localized versions - */ -export interface PromptsContext { - /** Skill prompts with localization */ - readonly skills: LocalizedSkillPrompt[] - - /** Command prompts with localization */ - readonly commands: LocalizedCommandPrompt[] - - /** Sub-agent prompts with localization */ - readonly subAgents: LocalizedSubAgentPrompt[] - - /** Rule prompts with localization */ - readonly rules: LocalizedRulePrompt[] - - /** Readme prompts with localization */ - readonly readme: LocalizedReadmePrompt[] - - /** Global memory prompt with localization */ - readonly globalMemory?: LocalizedPrompt - - /** Workspace memory prompt with localization */ - readonly workspaceMemory?: LocalizedPrompt -} - -/** - * Factory function type for creating localized prompts - */ -export type LocalizedPromptFactory = ( - name: string, - src: LocalizedSource, - dist?: LocalizedContent, - metadata?: Partial['metadata']> -) => LocalizedPrompt - -/** - * Options for reading localized prompts from different structures - */ -export interface LocalizedReadOptions { - /** File extensions for each locale */ - readonly localeExtensions: { - readonly zh: string - readonly en: string - } - - /** Entry file name (without extension, e.g., 'skill' for skills) */ - readonly 
entryFileName?: string - - /** Create prompt from content */ - readonly createPrompt: (content: string, locale: Locale, name: string) => T | Promise - - /** Prompt kind */ - readonly kind: K - - /** Whether this is a directory-based structure */ - readonly isDirectoryStructure: boolean -} - -/** - * Result of reading a directory structure (like skills) - */ -export interface DirectoryReadResult { - readonly prompts: LocalizedPrompt[] - readonly errors: ReadError[] -} - -/** - * Error during reading - */ -export interface ReadError { - readonly path: string - readonly error: Error - readonly phase: 'scan' | 'read' | 'compile' -} - -/** - * Locale selector for output plugins - */ -export interface LocaleSelector { - /** Select which locale to use for output */ - select: (localized: LocalizedPrompt) => LocalizedContent - - /** Check if a locale is available */ - isAvailable: (localized: LocalizedPrompt, locale: Locale) => boolean -} - -/** - * Configuration for localization behavior - */ -export interface LocalizationConfig { - /** Default locale for input reading */ - readonly defaultInputLocale: Locale - - /** Preferred locale for output (can be 'dist' to use compiled content) */ - readonly preferredOutputLocale: Locale | 'dist' - - /** Fallback behavior when preferred locale is not available */ - readonly fallbackBehavior: 'use-default' | 'skip' | 'throw' - - /** Whether to compile MDX on-the-fly if dist is missing */ - readonly autoCompile: boolean -} - -/** Default localization configuration */ -export const DEFAULT_LOCALIZATION_CONFIG: LocalizationConfig = { - defaultInputLocale: 'zh', - preferredOutputLocale: 'dist', - fallbackBehavior: 'use-default', - autoCompile: true -} - -/** - * Helper type to extract the prompt type from a LocalizedPrompt - */ -export type ExtractPromptType = T extends LocalizedPrompt ? P : never - -/** - * Helper type to extract the kind from a LocalizedPrompt - */ -export type ExtractPromptKind = T extends LocalizedPrompt ? 
K : never diff --git a/cli/src/plugins/plugin-shared/types/OutputTypes.ts b/cli/src/plugins/plugin-shared/types/OutputTypes.ts deleted file mode 100644 index 1e21c3c7..00000000 --- a/cli/src/plugins/plugin-shared/types/OutputTypes.ts +++ /dev/null @@ -1,24 +0,0 @@ -import type {GlobalConfigDirectoryType} from './Enums' -import type {AbsolutePath, RelativePath} from './FileSystemTypes' - -/** - * Global configuration based on user_home root directory - */ -export interface GlobalConfigDirectoryInUserHome { - readonly type: K - readonly directory: RelativePath -} - -/** - * Special, absolute path global memory prompt - */ -export interface GlobalConfigDirectoryInOther { - readonly type: K - readonly directory: AbsolutePath -} - -export type GlobalConfigDirectory = GlobalConfigDirectoryInUserHome | GlobalConfigDirectoryInOther - -export interface Target { - -} diff --git a/cli/src/plugins/plugin-shared/types/PluginTypes.ts b/cli/src/plugins/plugin-shared/types/PluginTypes.ts deleted file mode 100644 index 0053dad0..00000000 --- a/cli/src/plugins/plugin-shared/types/PluginTypes.ts +++ /dev/null @@ -1,392 +0,0 @@ -import type {ILogger} from '@truenine/logger' -import type {MdxGlobalScope} from '@truenine/md-compiler/globals' -import type {AindexConfig, CommandSeriesOptions} from './ConfigTypes.schema' -import type {PluginKind} from './Enums' -import type {RelativePath} from './FileSystemTypes' -import type { - CollectedInputContext, - Project -} from './InputTypes' - -export type FastGlobType = typeof import('fast-glob') - -/** - * Opaque type for ScopeRegistry. - * Concrete implementation lives in plugin-input-shared. 
- */ -export interface ScopeRegistryLike { - resolve: (expression: string) => string -} - -export interface Plugin { - readonly type: T - readonly name: string - readonly log: ILogger - readonly dependsOn?: readonly string[] -} - -export interface PluginContext { - logger: ILogger - fs: typeof import('node:fs') - path: typeof import('node:path') - glob: FastGlobType -} - -export interface InputPluginContext extends PluginContext { - readonly userConfigOptions: Required - readonly dependencyContext: Partial - - readonly globalScope?: MdxGlobalScope - - readonly scopeRegistry?: ScopeRegistryLike -} - -export interface InputPlugin extends Plugin { - collect: (ctx: InputPluginContext) => Partial | Promise> -} - -/** - * Plugin that can enhance projects after all projects are collected. - * This is useful for plugins that need to add data to projects - * that were collected by other plugins. - */ -export interface ProjectEnhancerPlugin extends InputPlugin { - enhanceProjects: (ctx: InputPluginContext, projects: readonly Project[]) => Project[] -} - -/** - * Context for output plugin operations - */ -export interface OutputPluginContext extends PluginContext { - readonly collectedInputContext: CollectedInputContext - readonly pluginOptions?: PluginOptions -} - -/** - * Context for output cleaning operations - */ -export interface OutputCleanContext extends OutputPluginContext { - readonly dryRun?: boolean -} - -/** - * Context for output writing operations - */ -export interface OutputWriteContext extends OutputPluginContext { - readonly dryRun?: boolean - - readonly registeredPluginNames?: readonly string[] -} - -/** - * Result of a single write operation - */ -export interface WriteResult { - readonly path: RelativePath - readonly success: boolean - readonly skipped?: boolean - readonly error?: Error -} - -/** - * Result of executing a side effect. - * Used for both write and clean effects. 
- */ -export interface EffectResult { - /** Whether the effect executed successfully */ - readonly success: boolean - /** Error details if the effect failed */ - readonly error?: Error - /** Description of what the effect did (for logging) */ - readonly description?: string -} - -/** - * Collected results from write operations - */ -export interface WriteResults { - readonly files: readonly WriteResult[] - readonly dirs: readonly WriteResult[] -} - -/** - * Awaitable type for sync/async flexibility - */ -export type Awaitable = T | Promise - -/** - * Handler function for write effects. - * Receives the write context and returns an effect result. - */ -export type WriteEffectHandler = (ctx: OutputWriteContext) => Awaitable - -/** - * Handler function for clean effects. - * Receives the clean context and returns an effect result. - */ -export type CleanEffectHandler = (ctx: OutputCleanContext) => Awaitable - -/** - * Result of executing an input effect. - * Used for preprocessing/cleaning input sources before collection. - */ -export interface InputEffectResult { - /** Whether the effect executed successfully */ - readonly success: boolean - /** Error details if the effect failed */ - readonly error?: Error - /** Description of what the effect did (for logging) */ - readonly description?: string - /** Files that were modified/created */ - readonly modifiedFiles?: readonly string[] - /** Files that were deleted */ - readonly deletedFiles?: readonly string[] -} - -/** - * Context provided to input effect handlers. - * Contains utilities and configuration for effect execution. 
- */ -export interface InputEffectContext { - /** Logger instance */ - readonly logger: ILogger - /** File system module */ - readonly fs: typeof import('node:fs') - /** Path module */ - readonly path: typeof import('node:path') - /** Glob module for file matching */ - readonly glob: FastGlobType - /** Child process spawn function */ - readonly spawn: typeof import('node:child_process').spawn - /** User configuration options */ - readonly userConfigOptions: PluginOptions - /** Resolved workspace directory */ - readonly workspaceDir: string - /** Resolved aindex directory */ - readonly aindexDir: string - /** Whether running in dry-run mode */ - readonly dryRun?: boolean -} - -/** - * Handler function for input effects. - * Receives the effect context and returns an effect result. - */ -export type InputEffectHandler = (ctx: InputEffectContext) => Awaitable - -/** - * Registration entry for an input effect. - */ -export interface InputEffectRegistration { - /** Descriptive name for logging */ - readonly name: string - /** The effect handler function */ - readonly handler: InputEffectHandler - /** Priority for execution order (lower = earlier, default: 0) */ - readonly priority?: number -} - -/** - * Result of resolving base paths from plugin options. - */ -export interface ResolvedBasePaths { - /** The resolved workspace directory path */ - readonly workspaceDir: string - /** The resolved aindex directory path */ - readonly aindexDir: string -} - -/** - * Represents a registered scope entry from a plugin. - */ -export interface PluginScopeRegistration { - /** The namespace name (e.g., 'myPlugin') */ - readonly namespace: string - /** Key-value pairs registered under this namespace */ - readonly values: Record -} - -/** - * Registration entry for an effect. - */ -export interface EffectRegistration { - /** Descriptive name for logging */ - readonly name: string - /** The effect handler function */ - readonly handler: THandler -} - -/** - * Output plugin interface. 
- * Plugins directly implement lifecycle hooks as methods. - * All hooks support both sync and async implementations. - */ -export interface OutputPlugin extends Plugin { - registerProjectOutputDirs?: (ctx: OutputPluginContext) => Awaitable - - registerProjectOutputFiles?: (ctx: OutputPluginContext) => Awaitable - - registerGlobalOutputDirs?: (ctx: OutputPluginContext) => Awaitable - - registerGlobalOutputFiles?: (ctx: OutputPluginContext) => Awaitable - - canCleanProject?: (ctx: OutputCleanContext) => Awaitable - - canCleanGlobal?: (ctx: OutputCleanContext) => Awaitable - - onCleanComplete?: (ctx: OutputCleanContext) => Awaitable - - canWrite?: (ctx: OutputWriteContext) => Awaitable - - writeProjectOutputs?: (ctx: OutputWriteContext) => Awaitable - - writeGlobalOutputs?: (ctx: OutputWriteContext) => Awaitable - - onWriteComplete?: (ctx: OutputWriteContext, results: WriteResults) => Awaitable -} - -/** - * Collected outputs from all plugins. - * Used by the clean command to gather all artifacts for cleanup. - */ -export interface CollectedOutputs { - readonly projectDirs: readonly RelativePath[] - readonly projectFiles: readonly RelativePath[] - readonly globalDirs: readonly RelativePath[] - readonly globalFiles: readonly RelativePath[] -} - -/** - * Collect all outputs from all registered output plugins. - * This is the main entry point for the clean command. 
- */ -export async function collectAllPluginOutputs( - plugins: readonly OutputPlugin[], - ctx: OutputPluginContext -): Promise { - const projectDirs: RelativePath[] = [] - const projectFiles: RelativePath[] = [] - const globalDirs: RelativePath[] = [] - const globalFiles: RelativePath[] = [] - - for (const plugin of plugins) { - if (plugin.registerProjectOutputDirs) projectDirs.push(...await plugin.registerProjectOutputDirs(ctx)) - if (plugin.registerProjectOutputFiles) projectFiles.push(...await plugin.registerProjectOutputFiles(ctx)) - if (plugin.registerGlobalOutputDirs) globalDirs.push(...await plugin.registerGlobalOutputDirs(ctx)) - if (plugin.registerGlobalOutputFiles) globalFiles.push(...await plugin.registerGlobalOutputFiles(ctx)) - } - - return { - projectDirs, - projectFiles, - globalDirs, - globalFiles - } -} - -/** - * Result of checking if a plugin allows cleaning. - */ -export interface CleanPermission { - readonly project: boolean - readonly global: boolean -} - -/** - * Check if all plugins allow cleaning. - * Returns a map of plugin name to whether cleaning is allowed. - */ -export async function checkCanClean( - plugins: readonly OutputPlugin[], - ctx: OutputCleanContext -): Promise> { - const result = new Map() - - for (const plugin of plugins) { - result.set(plugin.name, {project: await plugin.canCleanProject?.(ctx) ?? true, global: await plugin.canCleanGlobal?.(ctx) ?? true}) - } - - return result -} - -/** - * Execute post-clean hooks for all plugins. - */ -export async function executeOnCleanComplete( - plugins: readonly OutputPlugin[], - ctx: OutputCleanContext -): Promise { - for (const plugin of plugins) await plugin.onCleanComplete?.(ctx) -} - -/** - * Result of checking if a plugin allows writing. - */ -export interface WritePermission { - readonly project: boolean - readonly global: boolean -} - -/** - * Check if all plugins allow writing. - * Returns a map of plugin name to whether writing is allowed. 
- */ -export async function checkCanWrite( - plugins: readonly OutputPlugin[], - ctx: OutputWriteContext -): Promise> { - const result = new Map() - - for (const plugin of plugins) { - const canWrite = await plugin.canWrite?.(ctx) ?? true - result.set(plugin.name, {project: canWrite, global: canWrite}) - } - - return result -} - -/** - * Execute write operations for all plugins. - * Respects dry-run mode in context. - */ -export async function executeWriteOutputs( - plugins: readonly OutputPlugin[], - ctx: OutputWriteContext -): Promise> { - const results = new Map() - - for (const plugin of plugins) { - const projectResults = await plugin.writeProjectOutputs?.(ctx) ?? {files: [], dirs: []} - const globalResults = await plugin.writeGlobalOutputs?.(ctx) ?? {files: [], dirs: []} - - const merged: WriteResults = { - files: [...projectResults.files, ...globalResults.files], - dirs: [...projectResults.dirs, ...globalResults.dirs] - } - - results.set(plugin.name, merged) - await plugin.onWriteComplete?.(ctx, merged) - } - - return results -} - -/** - * Configuration to be processed by plugin.config.ts - * Interpreted by plugin system as collection context - * Path placeholder `~` resolves to the user home directory. 
- * - * @see CollectedInputContext - Collected context - */ -export interface PluginOptions { - readonly version?: string - - readonly workspaceDir?: string - - readonly aindex?: AindexConfig - - readonly commandSeriesOptions?: CommandSeriesOptions - - plugins?: Plugin[] - logLevel?: 'trace' | 'debug' | 'info' | 'warn' | 'error' -} diff --git a/cli/src/plugins/plugin-shared/types/index.ts b/cli/src/plugins/plugin-shared/types/index.ts deleted file mode 100644 index c6df13eb..00000000 --- a/cli/src/plugins/plugin-shared/types/index.ts +++ /dev/null @@ -1,12 +0,0 @@ -export * from './AindexTypes' -export * from './ConfigTypes.schema' -export * from './Enums' -export * from './Errors' -export * from './ExportMetadataTypes' -export * from './FileSystemTypes' -export * from './InputTypes' -export * from './LocalizedTypes' -export * from './OutputTypes' -export * from './PluginTypes' -export * from './PromptTypes' -export * from './RegistryTypes' diff --git a/cli/src/plugins/plugin-trae-cn-ide/index.ts b/cli/src/plugins/plugin-trae-cn-ide.ts similarity index 100% rename from cli/src/plugins/plugin-trae-cn-ide/index.ts rename to cli/src/plugins/plugin-trae-cn-ide.ts diff --git a/cli/src/plugins/plugin-trae-cn-ide/TraeCNIDEOutputPlugin.ts b/cli/src/plugins/plugin-trae-cn-ide/TraeCNIDEOutputPlugin.ts deleted file mode 100644 index c3472932..00000000 --- a/cli/src/plugins/plugin-trae-cn-ide/TraeCNIDEOutputPlugin.ts +++ /dev/null @@ -1,72 +0,0 @@ -import type { - OutputPluginContext, - OutputWriteContext, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import {AbstractOutputPlugin} from '@truenine/plugin-output-shared' - -const GLOBAL_MEMORY_FILE = 'GLOBAL.md' -const GLOBAL_CONFIG_DIR = '.trae-cn' -const USER_RULES_SUBDIR = 'user_rules' - -export class TraeCNIDEOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('TraeCNIDEOutputPlugin', { - globalConfigDir: GLOBAL_CONFIG_DIR, - 
outputFileName: GLOBAL_MEMORY_FILE, - dependsOn: ['TraeIDEOutputPlugin'] - }) - } - - private getGlobalUserRulesDir(): string { - return this.joinPath(this.getGlobalConfigDir(), USER_RULES_SUBDIR) - } - - async registerProjectOutputDirs(): Promise { - return [] - } - - async registerProjectOutputFiles(): Promise { - return [] - } - - async registerGlobalOutputDirs(): Promise { - return [ - this.createRelativePath(USER_RULES_SUBDIR, this.getGlobalConfigDir(), () => USER_RULES_SUBDIR) - ] - } - - async registerGlobalOutputFiles(ctx: OutputPluginContext): Promise { - const {globalMemory} = ctx.collectedInputContext - const results: RelativePath[] = [] - - if (globalMemory != null) results.push(this.createRelativePath(GLOBAL_MEMORY_FILE, this.getGlobalUserRulesDir(), () => USER_RULES_SUBDIR)) - - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {globalMemory} = ctx.collectedInputContext - if (globalMemory != null) return true - this.log.trace({action: 'skip', reason: 'noGlobalMemory'}) - return false - } - - async writeProjectOutputs(): Promise { - return {files: [], dirs: []} - } - - async writeGlobalOutputs(ctx: OutputWriteContext): Promise { - const {globalMemory} = ctx.collectedInputContext - const fileResults: WriteResult[] = [] - const userRulesDir = this.getGlobalUserRulesDir() - - if (globalMemory != null) { - fileResults.push(await this.writeFile(ctx, this.joinPath(userRulesDir, GLOBAL_MEMORY_FILE), globalMemory.content as string, 'globalMemory')) - } - - return {files: fileResults, dirs: []} - } -} diff --git a/cli/src/plugins/plugin-trae-ide/index.ts b/cli/src/plugins/plugin-trae-ide.ts similarity index 100% rename from cli/src/plugins/plugin-trae-ide/index.ts rename to cli/src/plugins/plugin-trae-ide.ts diff --git a/cli/src/plugins/plugin-trae-ide/TraeIDEOutputPlugin.ts b/cli/src/plugins/plugin-trae-ide/TraeIDEOutputPlugin.ts deleted file mode 100644 index 7bda3cf2..00000000 --- 
a/cli/src/plugins/plugin-trae-ide/TraeIDEOutputPlugin.ts +++ /dev/null @@ -1,388 +0,0 @@ -import type { - CommandPrompt, - OutputPluginContext, - OutputWriteContext, - Project, - ProjectChildrenMemoryPrompt, - SkillPrompt, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import {Buffer} from 'node:buffer' -import * as path from 'node:path' -import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' -import {AbstractOutputPlugin, filterCommandsByProjectConfig, filterSkillsByProjectConfig} from '@truenine/plugin-output-shared' -import {FilePathKind} from '../plugin-shared' - -const GLOBAL_MEMORY_FILE = 'GLOBAL.md' -const GLOBAL_CONFIG_DIR = '.trae' -const STEERING_SUBDIR = 'steering' -const RULES_SUBDIR = 'rules' -const COMMANDS_SUBDIR = 'commands' -const SKILLS_SUBDIR = 'skills' -const SKILL_FILE_NAME = 'SKILL.md' - -export class TraeIDEOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('TraeIDEOutputPlugin', {globalConfigDir: GLOBAL_CONFIG_DIR, outputFileName: GLOBAL_MEMORY_FILE, indexignore: '.traeignore'}) - } - - protected override getIgnoreOutputPath(): string | undefined { - if (this.indexignore == null) return void 0 - return path.join('.trae', '.ignore') - } - - private getGlobalSteeringDir(): string { - return this.joinPath(this.getGlobalConfigDir(), STEERING_SUBDIR) - } - - async registerProjectOutputDirs(ctx: OutputPluginContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - const {commands, skills} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const results: RelativePath[] = [] - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - results.push(this.createRelativePath( // Register rules dir (existing) - this.joinPath(projectDir.path, GLOBAL_CONFIG_DIR, RULES_SUBDIR), - projectDir.basePath, - () => 
RULES_SUBDIR - )) - - if (commands != null && commands.length > 0) { // Register commands dir (new: per-project) - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - if (filteredCommands.length > 0) { - results.push(this.createRelativePath( - this.joinPath(projectDir.path, GLOBAL_CONFIG_DIR, COMMANDS_SUBDIR), - projectDir.basePath, - () => COMMANDS_SUBDIR - )) - } - } - - if (skills != null && skills.length > 0) { // Register skills dirs (new: per-project) - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter.name - results.push(this.createRelativePath( - this.joinPath(projectDir.path, GLOBAL_CONFIG_DIR, SKILLS_SUBDIR, skillName), - projectDir.basePath, - () => skillName - )) - } - } - } - - return results - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - const {commands, skills} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const results: RelativePath[] = [] - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - const projectDir = project.dirFromWorkspacePath - - if (project.childMemoryPrompts != null) { // Child memory prompts (existing) - for (const child of project.childMemoryPrompts) { - results.push(this.createRelativePath( - this.joinPath(projectDir.path, GLOBAL_CONFIG_DIR, RULES_SUBDIR, this.buildSteeringFileName(child)), - projectDir.basePath, - () => RULES_SUBDIR - )) - } - } - - if (commands != null && commands.length > 0) { // Commands (new: per-project) - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - for (const cmd of filteredCommands) { - const fileName = this.transformCommandName(cmd, transformOptions) - 
results.push(this.createRelativePath( - this.joinPath(projectDir.path, GLOBAL_CONFIG_DIR, COMMANDS_SUBDIR, fileName), - projectDir.basePath, - () => COMMANDS_SUBDIR - )) - } - } - - if (skills != null && skills.length > 0) { // Skills (new: per-project) - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter.name - results.push(this.createRelativePath( - this.joinPath(projectDir.path, GLOBAL_CONFIG_DIR, SKILLS_SUBDIR, skillName, SKILL_FILE_NAME), - projectDir.basePath, - () => skillName - )) - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - const outputRelativePath = childDoc.relativePath.replace(/\.mdx$/, '.md') - results.push(this.createRelativePath( - this.joinPath(projectDir.path, GLOBAL_CONFIG_DIR, SKILLS_SUBDIR, skillName, outputRelativePath), - projectDir.basePath, - () => skillName - )) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) { - results.push(this.createRelativePath( - this.joinPath(projectDir.path, GLOBAL_CONFIG_DIR, SKILLS_SUBDIR, skillName, resource.relativePath), - projectDir.basePath, - () => skillName - )) - } - } - } - } - } - - results.push(...this.registerProjectIgnoreOutputFiles(projects)) - return results - } - - async registerGlobalOutputDirs(): Promise { - return [ - this.createRelativePath(STEERING_SUBDIR, this.getGlobalConfigDir(), () => STEERING_SUBDIR) - ] - } - - async registerGlobalOutputFiles(ctx: OutputPluginContext): Promise { - const {globalMemory} = ctx.collectedInputContext - const results: RelativePath[] = [] - - if (globalMemory != null) results.push(this.createRelativePath(GLOBAL_MEMORY_FILE, this.getGlobalSteeringDir(), () => STEERING_SUBDIR)) - - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {workspace, globalMemory, commands, skills, aiAgentIgnoreConfigFiles} = ctx.collectedInputContext - const hasChildPrompts = 
workspace.projects.some(p => (p.childMemoryPrompts?.length ?? 0) > 0) - const hasCommands = (commands?.length ?? 0) > 0 - const hasSkills = (skills?.length ?? 0) > 0 - const hasTraeIgnore = aiAgentIgnoreConfigFiles?.some(f => f.fileName === '.traeignore') ?? false - if (hasChildPrompts || globalMemory != null || hasCommands || hasSkills || hasTraeIgnore) return true - this.log.trace({action: 'skip', reason: 'noOutputs'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - const {commands, skills} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const fileResults: WriteResult[] = [] - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - const projectDir = project.dirFromWorkspacePath - - if (project.childMemoryPrompts != null) { // Child memory prompts (existing) - for (const child of project.childMemoryPrompts) fileResults.push(await this.writeSteeringFile(ctx, project, child)) - } - - if (commands != null && commands.length > 0) { // Commands (new: per-project) - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - for (const cmd of filteredCommands) fileResults.push(await this.writeProjectCommand(ctx, projectDir, cmd)) - } - - if (skills != null && skills.length > 0) { // Skills (new: per-project) - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - for (const skill of filteredSkills) fileResults.push(...await this.writeProjectSkill(ctx, projectDir, skill)) - } - } - - const ignoreResults = await this.writeProjectIgnoreFiles(ctx) - fileResults.push(...ignoreResults) - - return {files: fileResults, dirs: []} - } - - async writeGlobalOutputs(ctx: OutputWriteContext): Promise { - const {globalMemory} = ctx.collectedInputContext - const fileResults: WriteResult[] = [] - const steeringDir = this.getGlobalSteeringDir() - - if (globalMemory 
!= null) { - fileResults.push(await this.writeFile(ctx, this.joinPath(steeringDir, GLOBAL_MEMORY_FILE), globalMemory.content as string, 'globalMemory')) - } - - return {files: fileResults, dirs: []} - } - - private async writeProjectCommand(ctx: OutputWriteContext, projectDir: RelativePath, cmd: CommandPrompt): Promise { - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - const fileName = this.transformCommandName(cmd, transformOptions) - const commandsDir = path.join(projectDir.basePath, projectDir.path, GLOBAL_CONFIG_DIR, COMMANDS_SUBDIR) - const fullPath = path.join(commandsDir, fileName) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: path.join(projectDir.path, GLOBAL_CONFIG_DIR, COMMANDS_SUBDIR, fileName), - basePath: projectDir.basePath, - getDirectoryName: () => COMMANDS_SUBDIR, - getAbsolutePath: () => fullPath - } - - const content = this.buildMarkdownContentWithRaw(cmd.content, cmd.yamlFrontMatter, cmd.rawFrontMatter) - - return this.writeFileWithHandling(ctx, fullPath, content, { - type: 'projectCommand', - relativePath - }) - } - - private async writeProjectSkill(ctx: OutputWriteContext, projectDir: RelativePath, skill: SkillPrompt): Promise { - const results: WriteResult[] = [] - const skillName = skill.yamlFrontMatter.name - const skillDir = path.join(projectDir.basePath, projectDir.path, GLOBAL_CONFIG_DIR, SKILLS_SUBDIR, skillName) - const skillFilePath = path.join(skillDir, SKILL_FILE_NAME) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: path.join(projectDir.path, GLOBAL_CONFIG_DIR, SKILLS_SUBDIR, skillName, SKILL_FILE_NAME), - basePath: projectDir.basePath, - getDirectoryName: () => skillName, - getAbsolutePath: () => skillFilePath - } - - const frontMatterData = this.buildSkillFrontMatter(skill) - const skillContent = buildMarkdownWithFrontMatter(frontMatterData, skill.content as string) - - if (ctx.dryRun === true) { - 
this.log.trace({action: 'dryRun', type: 'projectSkill', path: skillFilePath}) - results.push({path: relativePath, success: true, skipped: false}) - } else { - try { - this.ensureDirectory(skillDir) - this.writeFileSync(skillFilePath, skillContent) - this.log.trace({action: 'write', type: 'projectSkill', path: skillFilePath}) - results.push({path: relativePath, success: true}) - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'projectSkill', path: skillFilePath, error: errMsg}) - results.push({path: relativePath, success: false, error: error as Error}) - } - } - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) results.push(await this.writeSkillChildDoc(ctx, childDoc, skillDir, skillName, projectDir)) - } - - if (skill.resources != null) { - for (const resource of skill.resources) results.push(await this.writeSkillResource(ctx, resource, skillDir, skillName, projectDir)) - } - - return results - } - - private async writeSkillChildDoc(ctx: OutputWriteContext, childDoc: {relativePath: string, content: unknown}, skillDir: string, skillName: string, projectDir: RelativePath): Promise { - const outputRelativePath = childDoc.relativePath.replace(/\.mdx$/, '.md') - const childDocPath = path.join(skillDir, outputRelativePath) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: path.join(projectDir.path, GLOBAL_CONFIG_DIR, SKILLS_SUBDIR, skillName, outputRelativePath), - basePath: projectDir.basePath, - getDirectoryName: () => skillName, - getAbsolutePath: () => childDocPath - } - - const content = childDoc.content as string - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'skillChildDoc', path: childDocPath}) - return {path: relativePath, success: true, skipped: false} - } - - try { - const parentDir = path.dirname(childDocPath) - this.ensureDirectory(parentDir) - this.writeFileSync(childDocPath, content) - 
this.log.trace({action: 'write', type: 'skillChildDoc', path: childDocPath}) - return {path: relativePath, success: true} - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'skillChildDoc', path: childDocPath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - private async writeSkillResource(ctx: OutputWriteContext, resource: {relativePath: string, content: string, encoding: 'text' | 'base64'}, skillDir: string, skillName: string, projectDir: RelativePath): Promise { - const resourcePath = path.join(skillDir, resource.relativePath) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: path.join(projectDir.path, GLOBAL_CONFIG_DIR, SKILLS_SUBDIR, skillName, resource.relativePath), - basePath: projectDir.basePath, - getDirectoryName: () => skillName, - getAbsolutePath: () => resourcePath - } - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'skillResource', path: resourcePath}) - return {path: relativePath, success: true, skipped: false} - } - - try { - const parentDir = path.dirname(resourcePath) - this.ensureDirectory(parentDir) - if (resource.encoding === 'base64') { - const buffer = Buffer.from(resource.content, 'base64') - this.writeFileSyncBuffer(resourcePath, buffer) - } else this.writeFileSync(resourcePath, resource.content) - this.log.trace({action: 'write', type: 'skillResource', path: resourcePath}) - return {path: relativePath, success: true} - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'skillResource', path: resourcePath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - protected override buildSkillFrontMatter(skill: SkillPrompt): Record { - const fm: Record = { - description: skill.yamlFrontMatter.description ?? 
'' - } - - if (skill.yamlFrontMatter.displayName != null) fm['name'] = skill.yamlFrontMatter.displayName - - return fm - } - - private buildSteeringFileName(child: ProjectChildrenMemoryPrompt): string { - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const normalized = childPath.replaceAll('\\', '/').replaceAll(/^\/+|\/+$/g, '').replaceAll('/', '-') - return `trae-${normalized}.md` - } - - private async writeSteeringFile(ctx: OutputWriteContext, project: Project, child: ProjectChildrenMemoryPrompt): Promise { - const projectDir = project.dirFromWorkspacePath! - const fileName = this.buildSteeringFileName(child) - const targetDir = this.joinPath(projectDir.basePath, projectDir.path, GLOBAL_CONFIG_DIR, RULES_SUBDIR) - const fullPath = this.joinPath(targetDir, fileName) - - const childPath = child.workingChildDirectoryPath?.path ?? child.dir.path - const globPattern = `${childPath.replaceAll('\\', '/')}/**` - - const content = [ - '---', - 'alwaysApply: false', - `globs: ${globPattern}`, - '---', - '', - child.content - ].join('\n') - - return this.writeFile(ctx, fullPath, content, 'steeringFile') - } -} diff --git a/cli/src/plugins/plugin-vscode/index.ts b/cli/src/plugins/plugin-vscode.ts similarity index 100% rename from cli/src/plugins/plugin-vscode/index.ts rename to cli/src/plugins/plugin-vscode.ts diff --git a/cli/src/plugins/plugin-vscode/VisualStudioCodeIDEConfigOutputPlugin.ts b/cli/src/plugins/plugin-vscode/VisualStudioCodeIDEConfigOutputPlugin.ts deleted file mode 100644 index e415e3ac..00000000 --- a/cli/src/plugins/plugin-vscode/VisualStudioCodeIDEConfigOutputPlugin.ts +++ /dev/null @@ -1,134 +0,0 @@ -import type { - OutputPluginContext, - OutputWriteContext, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import {AbstractOutputPlugin} from '@truenine/plugin-output-shared' -import {FilePathKind, IDEKind} from '../plugin-shared' - -const VSCODE_DIR = '.vscode' 
- -/** - * Default VS Code config files that this plugin manages. - * These are the relative paths within each project directory. - */ -const VSCODE_CONFIG_FILES = [ - '.vscode/settings.json', - '.vscode/extensions.json' -] as const - -export class VisualStudioCodeIDEConfigOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('VisualStudioCodeIDEConfigOutputPlugin') - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = ctx.collectedInputContext.workspace - const {vscodeConfigFiles} = ctx.collectedInputContext - - const hasVSCodeConfigs = vscodeConfigFiles != null && vscodeConfigFiles.length > 0 - if (!hasVSCodeConfigs) return results - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - if (project.isPromptSourceProject === true) continue - - for (const configFile of VSCODE_CONFIG_FILES) { - const filePath = this.joinPath(projectDir.path, configFile) - results.push({ - pathKind: FilePathKind.Relative, - path: filePath, - basePath: projectDir.basePath, - getDirectoryName: () => this.dirname(configFile), - getAbsolutePath: () => this.resolvePath(projectDir.basePath, filePath) - }) - } - } - - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {vscodeConfigFiles} = ctx.collectedInputContext - const hasVSCodeConfigs = vscodeConfigFiles != null && vscodeConfigFiles.length > 0 - - if (hasVSCodeConfigs) return true - - this.log.debug('skipped', {reason: 'no VS Code config files found'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const {projects} = ctx.collectedInputContext.workspace - const {vscodeConfigFiles} = ctx.collectedInputContext - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - const vscodeConfigs = vscodeConfigFiles ?? 
[] - - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - const projectName = project.name ?? 'unknown' - - for (const config of vscodeConfigs) { - const result = await this.writeConfigFile(ctx, projectDir, config, `project:${projectName}`) - fileResults.push(result) - } - } - - return {files: fileResults, dirs: dirResults} - } - - private async writeConfigFile( - ctx: OutputWriteContext, - projectDir: RelativePath, - config: {type: IDEKind, content: string, dir: {path: string}}, - label: string - ): Promise { - const targetRelativePath = this.getTargetRelativePath(config) - const fullPath = this.resolvePath(projectDir.basePath, projectDir.path, targetRelativePath) - - const relativePath: RelativePath = { - pathKind: FilePathKind.Relative, - path: this.joinPath(projectDir.path, targetRelativePath), - basePath: projectDir.basePath, - getDirectoryName: () => this.dirname(targetRelativePath), - getAbsolutePath: () => fullPath - } - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'config', path: fullPath, label}) - return {path: relativePath, success: true, skipped: false} - } - - try { - const dir = this.dirname(fullPath) - this.ensureDirectory(dir) - this.writeFileSync(fullPath, config.content) - this.log.trace({action: 'write', type: 'config', path: fullPath, label}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'config', path: fullPath, label, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - private getTargetRelativePath(config: {type: IDEKind, dir: {path: string}}): string { - const sourcePath = config.dir.path - - if (config.type !== IDEKind.VSCode) return this.basename(sourcePath) - - const vscodeIndex = sourcePath.indexOf(VSCODE_DIR) - if (vscodeIndex !== -1) return sourcePath.slice(Math.max(0, vscodeIndex)) - return this.joinPath(VSCODE_DIR, this.basename(sourcePath)) - } -} diff --git a/cli/src/plugins/plugin-warp-ide/index.ts b/cli/src/plugins/plugin-warp-ide.ts similarity index 100% rename from cli/src/plugins/plugin-warp-ide/index.ts rename to cli/src/plugins/plugin-warp-ide.ts diff --git a/cli/src/plugins/plugin-warp-ide/WarpIDEOutputPlugin.ts b/cli/src/plugins/plugin-warp-ide/WarpIDEOutputPlugin.ts deleted file mode 100644 index 476f1c2a..00000000 --- a/cli/src/plugins/plugin-warp-ide/WarpIDEOutputPlugin.ts +++ /dev/null @@ -1,128 +0,0 @@ -import type { - OutputPluginContext, - OutputWriteContext, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import {AbstractOutputPlugin} from '@truenine/plugin-output-shared' -import {PLUGIN_NAMES} from '../plugin-shared' - -const PROJECT_MEMORY_FILE = 'WARP.md' - -export class WarpIDEOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('WarpIDEOutputPlugin', {outputFileName: PROJECT_MEMORY_FILE, indexignore: '.warpindexignore'}) - } - - private isAgentsPluginRegisteredInCtx(ctx: OutputPluginContext | OutputWriteContext): boolean { - if ('registeredPluginNames' in ctx && ctx.registeredPluginNames != null) return ctx.registeredPluginNames.includes(PLUGIN_NAMES.AgentsOutput) - return false - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {projects} = 
ctx.collectedInputContext.workspace - const agentsRegistered = this.isAgentsPluginRegisteredInCtx(ctx) - - for (const project of projects) { - if (project.dirFromWorkspacePath == null) continue - - if (agentsRegistered) { - results.push(this.createFileRelativePath(project.dirFromWorkspacePath, PROJECT_MEMORY_FILE)) // When AgentsOutputPlugin is registered, register WARP.md for global prompt output to each project - } else { - if (project.rootMemoryPrompt != null) results.push(this.createFileRelativePath(project.dirFromWorkspacePath, PROJECT_MEMORY_FILE)) // Normal mode: register files for projects with prompts - - if (project.childMemoryPrompts != null) { - for (const child of project.childMemoryPrompts) { - if (child.dir != null && this.isRelativePath(child.dir)) results.push(this.createFileRelativePath(child.dir, PROJECT_MEMORY_FILE)) - } - } - } - } - - results.push(...this.registerProjectIgnoreOutputFiles(projects)) - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const agentsRegistered = this.shouldSkipDueToPlugin(ctx, PLUGIN_NAMES.AgentsOutput) - const {workspace, globalMemory, aiAgentIgnoreConfigFiles} = ctx.collectedInputContext - - if (agentsRegistered) { - if (globalMemory == null) { // When AgentsOutputPlugin is registered, only write if we have global memory - this.log.debug('skipped', {reason: 'AgentsOutputPlugin registered but no global memory'}) - return false - } - return true - } - - const hasProjectOutputs = workspace.projects.some( // Normal mode: check for project outputs - p => p.rootMemoryPrompt != null || (p.childMemoryPrompts?.length ?? 0) > 0 - ) - - const hasWarpIgnore = aiAgentIgnoreConfigFiles?.some(f => f.fileName === '.warpindexignore') ?? 
false - - if (hasProjectOutputs || hasWarpIgnore) return true - - this.log.debug('skipped', {reason: 'no outputs to write'}) - return false - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const agentsRegistered = this.shouldSkipDueToPlugin(ctx, PLUGIN_NAMES.AgentsOutput) - const {workspace, globalMemory} = ctx.collectedInputContext - const {projects} = workspace - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - if (agentsRegistered) { - if (globalMemory != null) { - for (const project of projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - - const projectName = project.name ?? 'unknown' - const result = await this.writePromptFile(ctx, projectDir, globalMemory.content as string, `project:${projectName}/global-warp`) - fileResults.push(result) - } - } - - const ignoreResults = await this.writeProjectIgnoreFiles(ctx) - fileResults.push(...ignoreResults) - - return {files: fileResults, dirs: dirResults} - } - - const globalMemoryContent = this.extractGlobalMemoryContent(ctx) // Normal mode: write combined content - - for (const project of projects) { - const projectName = project.name ?? 'unknown' - const projectDir = project.dirFromWorkspacePath - - if (projectDir == null) continue - - if (project.rootMemoryPrompt != null) { // Write root memory prompt (only if exists) - const combinedContent = this.combineGlobalWithContent( - globalMemoryContent, - project.rootMemoryPrompt.content as string - ) - - const result = await this.writePromptFile(ctx, projectDir, combinedContent, `project:${projectName}/root`) - fileResults.push(result) - } - - if (project.childMemoryPrompts != null) { // Write children memory prompts - for (const child of project.childMemoryPrompts) { - const childResult = await this.writePromptFile(ctx, child.dir, child.content as string, `project:${projectName}/child:${child.workingChildDirectoryPath?.path ?? 
'unknown'}`) - fileResults.push(childResult) - } - } - } - - const ignoreResults = await this.writeProjectIgnoreFiles(ctx) - fileResults.push(...ignoreResults) - - return {files: fileResults, dirs: dirResults} - } -} diff --git a/cli/src/plugins/plugin-windsurf/index.ts b/cli/src/plugins/plugin-windsurf.ts similarity index 100% rename from cli/src/plugins/plugin-windsurf/index.ts rename to cli/src/plugins/plugin-windsurf.ts diff --git a/cli/src/plugins/plugin-windsurf/WindsurfOutputPlugin.ts b/cli/src/plugins/plugin-windsurf/WindsurfOutputPlugin.ts deleted file mode 100644 index dc3bace2..00000000 --- a/cli/src/plugins/plugin-windsurf/WindsurfOutputPlugin.ts +++ /dev/null @@ -1,385 +0,0 @@ -import type {RuleContentOptions} from '@truenine/plugin-output-shared' -import type { - CommandPrompt, - OutputPluginContext, - OutputWriteContext, - RulePrompt, - SkillPrompt, - WriteResult, - WriteResults -} from '../plugin-shared' -import type {RelativePath} from '../plugin-shared/types' -import {Buffer} from 'node:buffer' -import * as fs from 'node:fs' -import * as path from 'node:path' -import {buildMarkdownWithFrontMatter} from '@truenine/md-compiler/markdown' -import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterCommandsByProjectConfig, filterRulesByProjectConfig, filterSkillsByProjectConfig} from '@truenine/plugin-output-shared' -import {FilePathKind, PLUGIN_NAMES} from '../plugin-shared' - -const CODEIUM_WINDSURF_DIR = '.codeium/windsurf' -const WORKFLOWS_SUBDIR = 'global_workflows' -const MEMORIES_SUBDIR = 'memories' -const GLOBAL_MEMORY_FILE = 'global_rules.md' -const SKILLS_SUBDIR = 'skills' -const SKILL_FILE_NAME = 'SKILL.md' -const WINDSURF_RULES_DIR = '.windsurf' -const WINDSURF_RULES_SUBDIR = 'rules' -const RULE_FILE_PREFIX = 'rule-' - -export class WindsurfOutputPlugin extends AbstractOutputPlugin { - constructor() { - super('WindsurfOutputPlugin', { - globalConfigDir: CODEIUM_WINDSURF_DIR, - outputFileName: '', - dependsOn: [PLUGIN_NAMES.AgentsOutput], 
- indexignore: '.codeiumignore' - }) - } - - async registerGlobalOutputDirs(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {commands, skills, rules} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - - if (commands != null && commands.length > 0) { - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - if (filteredCommands.length > 0) { - const workflowsDir = this.getGlobalWorkflowsDir() - results.push({pathKind: FilePathKind.Relative, path: WORKFLOWS_SUBDIR, basePath: this.getCodeiumWindsurfDir(), getDirectoryName: () => WORKFLOWS_SUBDIR, getAbsolutePath: () => workflowsDir}) - } - } - - if (skills != null && skills.length > 0) { - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter.name - const skillPath = path.join(this.getCodeiumWindsurfDir(), SKILLS_SUBDIR, skillName) - results.push({pathKind: FilePathKind.Relative, path: path.join(SKILLS_SUBDIR, skillName), basePath: this.getCodeiumWindsurfDir(), getDirectoryName: () => skillName, getAbsolutePath: () => skillPath}) - } - } - - const globalRules = rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules == null || globalRules.length === 0) return results - - const codeiumDir = this.getCodeiumWindsurfDir() - const memoriesDir = path.join(codeiumDir, MEMORIES_SUBDIR) - results.push({pathKind: FilePathKind.Relative, path: MEMORIES_SUBDIR, basePath: codeiumDir, getDirectoryName: () => MEMORIES_SUBDIR, getAbsolutePath: () => memoriesDir}) - return results - } - - async registerGlobalOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {skills, commands} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - - if (commands != null && commands.length > 0) { - const filteredCommands = 
filterCommandsByProjectConfig(commands, projectConfig) - const workflowsDir = this.getGlobalWorkflowsDir() - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - for (const cmd of filteredCommands) { - const fileName = this.transformCommandName(cmd, transformOptions) - const fullPath = path.join(workflowsDir, fileName) - results.push({pathKind: FilePathKind.Relative, path: path.join(WORKFLOWS_SUBDIR, fileName), basePath: this.getCodeiumWindsurfDir(), getDirectoryName: () => WORKFLOWS_SUBDIR, getAbsolutePath: () => fullPath}) - } - } - - const globalRules = ctx.collectedInputContext.rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules != null && globalRules.length > 0) { - const codeiumDir = this.getCodeiumWindsurfDir() - const memoriesDir = path.join(codeiumDir, MEMORIES_SUBDIR) - for (const rule of globalRules) { - const fileName = this.buildRuleFileName(rule) - const fullPath = path.join(memoriesDir, fileName) - results.push({pathKind: FilePathKind.Relative, path: path.join(MEMORIES_SUBDIR, fileName), basePath: codeiumDir, getDirectoryName: () => MEMORIES_SUBDIR, getAbsolutePath: () => fullPath}) - } - } - - const filteredSkills = skills != null ? 
filterSkillsByProjectConfig(skills, projectConfig) : [] - if (filteredSkills.length === 0) return results - - const skillsDir = this.getSkillsDir() - const codeiumDir = this.getCodeiumWindsurfDir() - for (const skill of filteredSkills) { - const skillName = skill.yamlFrontMatter.name - const skillDir = path.join(skillsDir, skillName) - results.push({pathKind: FilePathKind.Relative, path: path.join(SKILLS_SUBDIR, skillName, SKILL_FILE_NAME), basePath: codeiumDir, getDirectoryName: () => skillName, getAbsolutePath: () => path.join(skillDir, SKILL_FILE_NAME)}) - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) { - const outputRelativePath = childDoc.relativePath.replace(/\.mdx$/, '.md') - results.push({pathKind: FilePathKind.Relative, path: path.join(SKILLS_SUBDIR, skillName, outputRelativePath), basePath: codeiumDir, getDirectoryName: () => skillName, getAbsolutePath: () => path.join(skillDir, outputRelativePath)}) - } - } - - if (skill.resources != null) { - for (const resource of skill.resources) results.push({pathKind: FilePathKind.Relative, path: path.join(SKILLS_SUBDIR, skillName, resource.relativePath), basePath: codeiumDir, getDirectoryName: () => skillName, getAbsolutePath: () => path.join(skillDir, resource.relativePath)}) - } - } - return results - } - - async canWrite(ctx: OutputWriteContext): Promise { - const {skills, commands, globalMemory, rules, aiAgentIgnoreConfigFiles} = ctx.collectedInputContext - const hasSkills = (skills?.length ?? 0) > 0 - const hasCommands = (commands?.length ?? 0) > 0 - const hasRules = (rules?.length ?? 0) > 0 - const hasGlobalMemory = globalMemory != null - const hasCodeIgnore = aiAgentIgnoreConfigFiles?.some(f => f.fileName === '.codeiumignore') ?? 
false - - if (hasSkills || hasCommands || hasGlobalMemory || hasRules || hasCodeIgnore) return true - - this.log.trace({action: 'skip', reason: 'noOutputs'}) - return false - } - - async writeGlobalOutputs(ctx: OutputWriteContext): Promise { - const {skills, commands, globalMemory, rules} = ctx.collectedInputContext - const projectConfig = this.resolvePromptSourceProjectConfig(ctx) - const fileResults: WriteResult[] = [] - const dirResults: WriteResult[] = [] - - if (globalMemory != null) fileResults.push(await this.writeGlobalMemory(ctx, globalMemory.content as string)) - - if (skills != null && skills.length > 0) { - const filteredSkills = filterSkillsByProjectConfig(skills, projectConfig) - const skillsDir = this.getSkillsDir() - for (const skill of filteredSkills) fileResults.push(...await this.writeGlobalSkill(ctx, skillsDir, skill)) - } - - if (commands != null && commands.length > 0) { - const filteredCommands = filterCommandsByProjectConfig(commands, projectConfig) - const workflowsDir = this.getGlobalWorkflowsDir() - for (const cmd of filteredCommands) fileResults.push(await this.writeGlobalWorkflow(ctx, workflowsDir, cmd)) - } - - const globalRules = rules?.filter(r => this.normalizeRuleScope(r) === 'global') - if (globalRules == null || globalRules.length === 0) return {files: fileResults, dirs: dirResults} - - const memoriesDir = this.getGlobalMemoriesDir() - for (const rule of globalRules) fileResults.push(await this.writeRuleFile(ctx, memoriesDir, rule, this.getCodeiumWindsurfDir(), MEMORIES_SUBDIR)) - return {files: fileResults, dirs: dirResults} - } - - async registerProjectOutputDirs(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {workspace, rules} = ctx.collectedInputContext - if (rules == null || rules.length === 0) return results - - for (const project of workspace.projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - const projectRules = 
applySubSeriesGlobPrefix(filterRulesByProjectConfig(rules.filter(r => this.normalizeRuleScope(r) === 'project'), project.projectConfig), project.projectConfig) - if (projectRules.length === 0) continue - const rulesDirPath = path.join(projectDir.path, WINDSURF_RULES_DIR, WINDSURF_RULES_SUBDIR) - results.push({pathKind: FilePathKind.Relative, path: rulesDirPath, basePath: projectDir.basePath, getDirectoryName: () => WINDSURF_RULES_SUBDIR, getAbsolutePath: () => path.join(projectDir.basePath, rulesDirPath)}) - } - return results - } - - async registerProjectOutputFiles(ctx: OutputPluginContext): Promise { - const results: RelativePath[] = [] - const {workspace, rules} = ctx.collectedInputContext - - if (rules != null && rules.length > 0) { - for (const project of workspace.projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - const projectRules = applySubSeriesGlobPrefix(filterRulesByProjectConfig(rules.filter(r => this.normalizeRuleScope(r) === 'project'), project.projectConfig), project.projectConfig) - for (const rule of projectRules) { - const fileName = this.buildRuleFileName(rule) - const filePath = path.join(projectDir.path, WINDSURF_RULES_DIR, WINDSURF_RULES_SUBDIR, fileName) - results.push({pathKind: FilePathKind.Relative, path: filePath, basePath: projectDir.basePath, getDirectoryName: () => WINDSURF_RULES_SUBDIR, getAbsolutePath: () => path.join(projectDir.basePath, filePath)}) - } - } - } - - results.push(...this.registerProjectIgnoreOutputFiles(workspace.projects)) - return results - } - - async writeProjectOutputs(ctx: OutputWriteContext): Promise { - const fileResults: WriteResult[] = [] - const {workspace, rules} = ctx.collectedInputContext - - if (rules != null && rules.length > 0) { - for (const project of workspace.projects) { - const projectDir = project.dirFromWorkspacePath - if (projectDir == null) continue - const projectRules = applySubSeriesGlobPrefix(filterRulesByProjectConfig(rules.filter(r => 
this.normalizeRuleScope(r) === 'project'), project.projectConfig), project.projectConfig) - if (projectRules.length === 0) continue - const rulesDir = path.join(projectDir.basePath, projectDir.path, WINDSURF_RULES_DIR, WINDSURF_RULES_SUBDIR) - for (const rule of projectRules) fileResults.push(await this.writeRuleFile(ctx, rulesDir, rule, projectDir.basePath, path.join(projectDir.path, WINDSURF_RULES_DIR, WINDSURF_RULES_SUBDIR))) - } - } - - fileResults.push(...await this.writeProjectIgnoreFiles(ctx)) - return {files: fileResults, dirs: []} - } - - private getSkillsDir(): string { return path.join(this.getCodeiumWindsurfDir(), SKILLS_SUBDIR) } - private getCodeiumWindsurfDir(): string { return path.join(this.getHomeDir(), CODEIUM_WINDSURF_DIR) } - private getGlobalMemoriesDir(): string { return path.join(this.getCodeiumWindsurfDir(), MEMORIES_SUBDIR) } - private getGlobalWorkflowsDir(): string { return path.join(this.getCodeiumWindsurfDir(), WORKFLOWS_SUBDIR) } - - private async writeGlobalMemory(ctx: OutputWriteContext, content: string): Promise { - const memoriesDir = this.getGlobalMemoriesDir() - const fullPath = path.join(memoriesDir, GLOBAL_MEMORY_FILE) - const codeiumDir = this.getCodeiumWindsurfDir() - const relativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(MEMORIES_SUBDIR, GLOBAL_MEMORY_FILE), basePath: codeiumDir, getDirectoryName: () => MEMORIES_SUBDIR, getAbsolutePath: () => fullPath} - - if (ctx.dryRun === true) { this.log.trace({action: 'dryRun', type: 'globalMemory', path: fullPath}); return {path: relativePath, success: true, skipped: false} } - - try { - this.ensureDirectory(memoriesDir) - this.writeFileSync(fullPath, content) - this.log.trace({action: 'write', type: 'globalMemory', path: fullPath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'globalMemory', path: fullPath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - private async writeGlobalWorkflow(ctx: OutputWriteContext, workflowsDir: string, cmd: CommandPrompt): Promise { - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - const fileName = this.transformCommandName(cmd, transformOptions) - const fullPath = path.join(workflowsDir, fileName) - const relativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(WORKFLOWS_SUBDIR, fileName), basePath: this.getCodeiumWindsurfDir(), getDirectoryName: () => WORKFLOWS_SUBDIR, getAbsolutePath: () => fullPath} - const content = this.buildMarkdownContentWithRaw(cmd.content, cmd.yamlFrontMatter, cmd.rawFrontMatter) - - if (ctx.dryRun === true) { this.log.trace({action: 'dryRun', type: 'globalWorkflow', path: fullPath}); return {path: relativePath, success: true, skipped: false} } - - try { - this.ensureDirectory(workflowsDir) - fs.writeFileSync(fullPath, content) - this.log.trace({action: 'write', type: 'globalWorkflow', path: fullPath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'globalWorkflow', path: fullPath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - private async writeGlobalSkill(ctx: OutputWriteContext, skillsDir: string, skill: SkillPrompt): Promise { - const results: WriteResult[] = [] - const skillName = skill.yamlFrontMatter.name - const skillDir = path.join(skillsDir, skillName) - const skillFilePath = path.join(skillDir, SKILL_FILE_NAME) - const codeiumDir = this.getCodeiumWindsurfDir() - const skillRelativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(SKILLS_SUBDIR, skillName, SKILL_FILE_NAME), basePath: codeiumDir, getDirectoryName: () => skillName, getAbsolutePath: () => skillFilePath} - - const frontMatterData = this.buildSkillFrontMatter(skill) - const skillContent = buildMarkdownWithFrontMatter(frontMatterData, skill.content as string) - - if (ctx.dryRun === true) { - this.log.trace({action: 'dryRun', type: 'skill', path: skillFilePath}) - results.push({path: skillRelativePath, success: true, skipped: false}) - } else { - try { - this.ensureDirectory(skillDir) - this.writeFileSync(skillFilePath, skillContent) - this.log.trace({action: 'write', type: 'skill', path: skillFilePath}) - results.push({path: skillRelativePath, success: true}) - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'skill', path: skillFilePath, error: errMsg}) - results.push({path: skillRelativePath, success: false, error: error as Error}) - } - } - - if (skill.childDocs != null) { - for (const childDoc of skill.childDocs) results.push(await this.writeSkillChildDoc(ctx, childDoc, skillDir, skillName, codeiumDir)) - } - - if (skill.resources != null) { - for (const resource of skill.resources) results.push(await this.writeSkillResource(ctx, resource, skillDir, skillName, codeiumDir)) - } - - return results - } - - private async writeSkillChildDoc(ctx: OutputWriteContext, childDoc: {relativePath: string, content: unknown}, skillDir: string, skillName: string, baseDir: string): Promise { - const outputRelativePath = childDoc.relativePath.replace(/\.mdx$/, '.md') - const childDocPath = path.join(skillDir, outputRelativePath) - const relativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(SKILLS_SUBDIR, skillName, outputRelativePath), basePath: baseDir, getDirectoryName: () => skillName, getAbsolutePath: () => childDocPath} - const content = childDoc.content as string - - if (ctx.dryRun === true) { this.log.trace({action: 'dryRun', type: 'childDoc', path: childDocPath}); return {path: relativePath, success: true, skipped: false} } - - try { - this.ensureDirectory(path.dirname(childDocPath)) - this.writeFileSync(childDocPath, content) - this.log.trace({action: 'write', type: 'childDoc', path: childDocPath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'childDoc', path: childDocPath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - private async writeSkillResource(ctx: OutputWriteContext, resource: {relativePath: string, content: string, encoding: 'text' | 'base64'}, skillDir: string, skillName: string, baseDir: string): Promise { - const resourcePath = path.join(skillDir, resource.relativePath) - const relativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(SKILLS_SUBDIR, skillName, resource.relativePath), basePath: baseDir, getDirectoryName: () => skillName, getAbsolutePath: () => resourcePath} - - if (ctx.dryRun === true) { this.log.trace({action: 'dryRun', type: 'resource', path: resourcePath}); return {path: relativePath, success: true, skipped: false} } - - try { - this.ensureDirectory(path.dirname(resourcePath)) - if (resource.encoding === 'base64') this.writeFileSyncBuffer(resourcePath, Buffer.from(resource.content, 'base64')) - else this.writeFileSync(resourcePath, resource.content) - this.log.trace({action: 'write', type: 'resource', path: resourcePath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? error.message : String(error) - this.log.error({action: 'write', type: 'resource', path: resourcePath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } - - protected override buildRuleFileName(rule: RulePrompt, prefix: string = RULE_FILE_PREFIX): string { - return `${prefix}${rule.series}-${rule.ruleName}.md` - } - - protected override buildRuleContent(rule: RulePrompt, _options?: RuleContentOptions): string { - const fmData: Record = {trigger: 'glob', globs: rule.globs.length > 0 ? 
rule.globs.join(', ') : ''} - const raw = buildMarkdownWithFrontMatter(fmData, rule.content) - const lines = raw.split('\n') - return lines.map(line => { - const match = /^(\s*globs:\s*)(['"])(.*)\2\s*$/.exec(line) - if (match == null) return line - const prefix = match[1] ?? 'globs: ' - const value = match[3] ?? '' - if (value.trim().length === 0) return line - return `${prefix}${value}` - }).join('\n') - } - - private async writeRuleFile(ctx: OutputWriteContext, rulesDir: string, rule: RulePrompt, basePath: string, relativeSubdir: string): Promise { - const fileName = this.buildRuleFileName(rule) - const fullPath = path.join(rulesDir, fileName) - const relativePath: RelativePath = {pathKind: FilePathKind.Relative, path: path.join(relativeSubdir, fileName), basePath, getDirectoryName: () => WINDSURF_RULES_SUBDIR, getAbsolutePath: () => fullPath} - const content = this.buildRuleContent(rule) - - if (ctx.dryRun === true) { this.log.trace({action: 'dryRun', type: 'ruleFile', path: fullPath}); return {path: relativePath, success: true, skipped: false} } - - try { - this.ensureDirectory(rulesDir) - this.writeFileSync(fullPath, content) - this.log.trace({action: 'write', type: 'ruleFile', path: fullPath}) - return {path: relativePath, success: true} - } - catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error) - this.log.error({action: 'write', type: 'ruleFile', path: fullPath, error: errMsg}) - return {path: relativePath, success: false, error: error as Error} - } - } -} diff --git a/cli/src/public-config-paths.ts b/cli/src/public-config-paths.ts new file mode 100644 index 00000000..1aa4d70b --- /dev/null +++ b/cli/src/public-config-paths.ts @@ -0,0 +1,164 @@ +import type {ProxyCommand, ProxyContext} from '@truenine/script-runtime' +import type {IDEKind} from './plugins/plugin-core/enums' +import type {ProjectIDEConfigFile} from './plugins/plugin-core/InputTypes' +import * as fs from 'node:fs' +import * as path from 'node:path' +import process from 'node:process' +import {resolvePublicPath} from '@truenine/script-runtime' +import {AINDEX_FILE_NAMES} from './plugins/plugin-core/AindexTypes' +import {FilePathKind} from './plugins/plugin-core/enums' + +export const PUBLIC_CONFIG_DEFINITION_DIR = 'public' +export const PUBLIC_PROXY_FILE_NAME = 'proxy.ts' + +export const PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH = '.gitignore' +export const PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH = '.git/info/exclude' + +export const AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS = [ + AINDEX_FILE_NAMES.QODER_IGNORE, + AINDEX_FILE_NAMES.CURSOR_IGNORE, + AINDEX_FILE_NAMES.WARP_INDEX_IGNORE, + AINDEX_FILE_NAMES.AI_IGNORE, + AINDEX_FILE_NAMES.CODEIUM_IGNORE, + '.kiroignore', + '.traeignore' +] as const + +export const KNOWN_PUBLIC_CONFIG_TARGET_RELATIVE_PATHS = [ + PUBLIC_GIT_IGNORE_TARGET_RELATIVE_PATH, + PUBLIC_GIT_EXCLUDE_TARGET_RELATIVE_PATH, + AINDEX_FILE_NAMES.EDITOR_CONFIG, + AINDEX_FILE_NAMES.VSCODE_SETTINGS, + AINDEX_FILE_NAMES.VSCODE_EXTENSIONS, + AINDEX_FILE_NAMES.IDEA_PROJECT_XML, + AINDEX_FILE_NAMES.IDEA_CODE_STYLE_CONFIG_XML, + AINDEX_FILE_NAMES.IDEA_GITIGNORE, + ...AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS +] as const + +export interface PublicDefinitionResolveOptions { + readonly command?: ProxyCommand | undefined + readonly workspaceDir?: string | undefined +} + 
+const publicDefinitionPathCache = new Map() + +function normalizeTargetRelativePath(targetRelativePath: string): string { + const normalizedPath = targetRelativePath + .split(/[\\/]+/) + .filter(segment => segment.length > 0) + .join('/') + + if (normalizedPath.length === 0) throw new Error('public target relative path cannot be empty') + return normalizedPath +} + +function getPublicRootDir(aindexDir: string): string { + return path.join(aindexDir, PUBLIC_CONFIG_DEFINITION_DIR) +} + +function getPublicProxyPath(aindexDir: string): string { + return path.join(getPublicRootDir(aindexDir), PUBLIC_PROXY_FILE_NAME) +} + +function getResolveCommand(options?: PublicDefinitionResolveOptions): ProxyCommand { + return options?.command ?? 'execute' +} + +function getResolveWorkspaceDir( + aindexDir: string, + options?: PublicDefinitionResolveOptions +): string { + return path.resolve(options?.workspaceDir ?? path.dirname(aindexDir)) +} + +function buildProxyContext( + aindexDir: string, + workspaceDir: string, + command: ProxyCommand +): ProxyContext { + const resolvedAindexDir = path.resolve(aindexDir) + + return { + cwd: workspaceDir, + workspaceDir, + aindexDir: resolvedAindexDir, + command, + platform: process.platform + } +} + +function resolvePublicDefinitionRelativePath( + aindexDir: string, + targetRelativePath: string, + options?: PublicDefinitionResolveOptions +): string { + const normalizedTargetPath = normalizeTargetRelativePath(targetRelativePath) + if (normalizedTargetPath === PUBLIC_PROXY_FILE_NAME) return PUBLIC_PROXY_FILE_NAME + + const proxyFilePath = getPublicProxyPath(aindexDir) + if (!(fs.existsSync(proxyFilePath) && fs.statSync(proxyFilePath).isFile())) return normalizedTargetPath + + const command = getResolveCommand(options) + const workspaceDir = getResolveWorkspaceDir(aindexDir, options) + const cacheKey = [proxyFilePath, workspaceDir, command, normalizedTargetPath].join('::') + const cachedPath = publicDefinitionPathCache.get(cacheKey) + if 
(cachedPath != null) return cachedPath + + const resolvedRelativePath = resolvePublicPath( + proxyFilePath, + buildProxyContext(aindexDir, workspaceDir, command), + normalizedTargetPath + ) + + publicDefinitionPathCache.set(cacheKey, resolvedRelativePath) + return resolvedRelativePath +} + +export function resolvePublicDefinitionPath( + aindexDir: string, + targetRelativePath: string, + options?: PublicDefinitionResolveOptions +): string { + const resolvedRelativePath = resolvePublicDefinitionRelativePath(aindexDir, targetRelativePath, options) + return path.join(getPublicRootDir(aindexDir), ...resolvedRelativePath.split(/[\\/]+/)) +} + +export function collectKnownPublicConfigDefinitionPaths( + aindexDir: string, + options?: PublicDefinitionResolveOptions +): string[] { + const resolvedPaths = new Set([ + resolvePublicDefinitionPath(aindexDir, PUBLIC_PROXY_FILE_NAME) + ]) + + for (const targetRelativePath of KNOWN_PUBLIC_CONFIG_TARGET_RELATIVE_PATHS) { + resolvedPaths.add(resolvePublicDefinitionPath(aindexDir, targetRelativePath, options)) + } + + return [...resolvedPaths] +} + +export function readPublicIdeConfigDefinitionFile( + type: T, + targetRelativePath: string, + aindexDir: string, + fs: typeof import('node:fs'), + options?: PublicDefinitionResolveOptions +): ProjectIDEConfigFile | undefined { + const absolutePath = resolvePublicDefinitionPath(aindexDir, targetRelativePath, options) + if (!(fs.existsSync(absolutePath) && fs.statSync(absolutePath).isFile())) return void 0 + + const content = fs.readFileSync(absolutePath, 'utf8') + return { + type, + content, + length: content.length, + filePathKind: FilePathKind.Absolute, + dir: { + pathKind: FilePathKind.Absolute, + path: absolutePath, + getDirectoryName: () => path.basename(absolutePath) + } + } +} diff --git a/cli/src/schema.ts b/cli/src/schema.ts index 36f68f06..1ff13cf7 100644 --- a/cli/src/schema.ts +++ b/cli/src/schema.ts @@ -1,5 +1,5 @@ import {zodToJsonSchema} from 'zod-to-json-schema' -import 
{ZUserConfigFile} from './plugins/plugin-shared' +import {ZUserConfigFile} from './plugins/plugin-core' /** * JSON Schema for .tnmsc.json — auto-generated from ZUserConfigFile via zod-to-json-schema. diff --git a/cli/src/script-runtime-worker.ts b/cli/src/script-runtime-worker.ts new file mode 100644 index 00000000..ae6854a6 --- /dev/null +++ b/cli/src/script-runtime-worker.ts @@ -0,0 +1,19 @@ +import {readFileSync} from 'node:fs' +import process from 'node:process' +import {resolvePublicPathUnchecked} from '@truenine/script-runtime' + +async function main(): Promise { + const [, , filePath, ctxJsonPath, logicalPath] = process.argv + if (filePath == null || ctxJsonPath == null || logicalPath == null) throw new Error('Usage: script-runtime-worker ') + + const ctxJson = readFileSync(ctxJsonPath, 'utf8') + const ctx = JSON.parse(ctxJson) as Parameters[1] + const result = await resolvePublicPathUnchecked(filePath, ctx, logicalPath) + process.stdout.write(`${result}\n`) +} + +main().catch((error: unknown) => { + const message = error instanceof Error ? error.message : String(error) + process.stderr.write(`${message}\n`) + process.exit(1) +}) diff --git a/cli/src/utils/EffectUtils.ts b/cli/src/utils/EffectUtils.ts deleted file mode 100644 index a98bdaf7..00000000 --- a/cli/src/utils/EffectUtils.ts +++ /dev/null @@ -1,335 +0,0 @@ -import type {Buffer} from 'node:buffer' -import type {ILogger, InputEffectContext} from '../plugins/plugin-shared' -import process from 'node:process' - -/** - * Options for cleaning stale dist files. 
- */ -export interface CleanStaleDistOptions { - /** Source directory (e.g., src/skills) */ - readonly srcDir: string - /** Distribution directory (e.g., dist/skills) */ - readonly distDir: string - /** File extension to match (default: '.md') */ - readonly extension?: string - /** Whether to run in dry-run mode */ - readonly dryRun?: boolean - /** Logger instance */ - readonly logger?: ILogger | undefined -} - -/** - * Result of cleaning stale dist files. - */ -export interface CleanStaleDistResult { - /** Files that were deleted */ - readonly deletedFiles: string[] - /** Files that would be deleted (dry-run mode) */ - readonly wouldDelete: string[] - /** Errors encountered during deletion */ - readonly errors: {file: string, error: Error}[] -} - -/** - * Clean stale files in dist directory that don't have corresponding source files. - * Compares dist directory against src directory and removes orphaned files. - * - * @param ctx - Effect context containing fs and path modules - * @param options - Configuration options - * @returns Result containing deleted files and any errors - */ -export function cleanStaleDistFiles( - ctx: Pick, - options: CleanStaleDistOptions -): CleanStaleDistResult { - const {srcDir, distDir, extension = '.md', dryRun = false, logger} = options - const {fs, path: nodePath} = ctx - - const result: CleanStaleDistResult = { - deletedFiles: [], - wouldDelete: [], - errors: [] - } - - if (!fs.existsSync(distDir)) { // Check if directories exist - logger?.debug({action: 'cleanStaleDistFiles', message: 'dist directory does not exist', distDir}) - return result - } - - if (!fs.existsSync(srcDir)) { - logger?.debug({action: 'cleanStaleDistFiles', message: 'src directory does not exist', srcDir}) - return result - } - - const distEntries = fs.readdirSync(distDir, {withFileTypes: true}) // Get all files in dist directory - - for (const entry of distEntries) { - if (entry.isDirectory()) { - const srcSubDir = nodePath.join(srcDir, entry.name) // For 
directories, check if corresponding src directory exists - const distSubDir = nodePath.join(distDir, entry.name) - - if (!fs.existsSync(srcSubDir)) { - if (dryRun) { // Source directory doesn't exist, mark for deletion - result.wouldDelete.push(distSubDir) - logger?.debug({action: 'cleanStaleDistFiles', wouldDelete: distSubDir}) - } else { - try { - fs.rmSync(distSubDir, {recursive: true, force: true}) - result.deletedFiles.push(distSubDir) - logger?.debug({action: 'cleanStaleDistFiles', deleted: distSubDir}) - } - catch (error) { - result.errors.push({file: distSubDir, error: error as Error}) - logger?.warn({action: 'cleanStaleDistFiles', error: (error as Error).message, file: distSubDir}) - } - } - } else { - const subResult = cleanStaleDistFiles(ctx, { // Recursively clean subdirectory - srcDir: srcSubDir, - distDir: distSubDir, - extension, - dryRun, - logger - }) - result.deletedFiles.push(...subResult.deletedFiles) - result.wouldDelete.push(...subResult.wouldDelete) - result.errors.push(...subResult.errors) - } - } else if (entry.isFile() && entry.name.endsWith(extension)) { - const distFilePath = nodePath.join(distDir, entry.name) // For files, check if corresponding src file exists - - const baseName = entry.name.replace(extension, '') // Convention: dist/foo.md -> src/foo/skill.md or src/foo.cn.mdx // Try to find corresponding source file - const possibleSrcPaths = [ - nodePath.join(srcDir, baseName, 'skill.md'), - nodePath.join(srcDir, `${baseName}.cn.mdx`), - nodePath.join(srcDir, `${baseName}${extension}`), - nodePath.join(srcDir, entry.name) - ] - - const srcExists = possibleSrcPaths.some(p => fs.existsSync(p)) - - if (!srcExists) { - if (dryRun) { - result.wouldDelete.push(distFilePath) - logger?.debug({action: 'cleanStaleDistFiles', wouldDelete: distFilePath}) - } else { - try { - fs.unlinkSync(distFilePath) - result.deletedFiles.push(distFilePath) - logger?.debug({action: 'cleanStaleDistFiles', deleted: distFilePath}) - } - catch (error) { - 
result.errors.push({file: distFilePath, error: error as Error}) - logger?.warn({action: 'cleanStaleDistFiles', error: (error as Error).message, file: distFilePath}) - } - } - } - } - } - - return result -} - -/** - * Options for syncing directories. - */ -export interface SyncDirectoryOptions { - /** Source directory */ - readonly srcDir: string - /** Target directory */ - readonly targetDir: string - /** File pattern to match (glob pattern) */ - readonly pattern?: string - /** Whether to delete files in target that don't exist in source */ - readonly deleteOrphans?: boolean - /** Whether to run in dry-run mode */ - readonly dryRun?: boolean - /** Logger instance */ - readonly logger?: ILogger | undefined -} - -/** - * Result of directory sync operation. - */ -export interface SyncDirectoryResult { - /** Files that were copied */ - readonly copiedFiles: string[] - /** Files that were deleted (orphans) */ - readonly deletedFiles: string[] - /** Errors encountered */ - readonly errors: {file: string, error: Error}[] -} - -/** - * Sync files from source directory to target directory. - * Optionally removes orphaned files in target that don't exist in source. 
- * - * @param ctx - Effect context containing fs and path modules - * @param options - Configuration options - * @returns Result containing copied/deleted files and any errors - */ -export function syncDirectory( - ctx: Pick, - options: SyncDirectoryOptions -): SyncDirectoryResult { - const {srcDir, targetDir, deleteOrphans = false, dryRun = false, logger} = options - const {fs, path: nodePath} = ctx - - const result: SyncDirectoryResult = { - copiedFiles: [], - deletedFiles: [], - errors: [] - } - - if (!dryRun && !fs.existsSync(targetDir)) fs.mkdirSync(targetDir, {recursive: true}) // Ensure target directory exists - - if (!fs.existsSync(srcDir)) { // Check if source exists - logger?.debug({action: 'syncDirectory', message: 'source directory does not exist', srcDir}) - return result - } - - const srcEntries = fs.readdirSync(srcDir, {withFileTypes: true}) // Get source files - const srcNames = new Set(srcEntries.map(e => e.name)) - - for (const entry of srcEntries) { // Copy files from source to target - const srcPath = nodePath.join(srcDir, entry.name) - const targetPath = nodePath.join(targetDir, entry.name) - - if (entry.isFile()) { - if (!dryRun) { - try { - fs.copyFileSync(srcPath, targetPath) - result.copiedFiles.push(targetPath) - logger?.debug({action: 'syncDirectory', copied: targetPath}) - } - catch (error) { - result.errors.push({file: targetPath, error: error as Error}) - } - } else result.copiedFiles.push(targetPath) - } else if (entry.isDirectory()) { - const subResult = syncDirectory(ctx, { // Recursively sync subdirectories - srcDir: srcPath, - targetDir: targetPath, - deleteOrphans, - dryRun, - logger - }) - result.copiedFiles.push(...subResult.copiedFiles) - result.deletedFiles.push(...subResult.deletedFiles) - result.errors.push(...subResult.errors) - } - } - - if (!(deleteOrphans && fs.existsSync(targetDir))) return result // Delete orphaned files in target - - const targetEntries = fs.readdirSync(targetDir, {withFileTypes: true}) - for (const 
entry of targetEntries) { - if (!srcNames.has(entry.name)) { - const targetPath = nodePath.join(targetDir, entry.name) - if (!dryRun) { - try { - if (entry.isDirectory()) fs.rmSync(targetPath, {recursive: true, force: true}) - else fs.unlinkSync(targetPath) - result.deletedFiles.push(targetPath) - logger?.debug({action: 'syncDirectory', deleted: targetPath}) - } - catch (error) { - result.errors.push({file: targetPath, error: error as Error}) - } - } else result.deletedFiles.push(targetPath) - } - } - return result -} - -/** - * Options for executing a shell command as an effect. - */ -export interface ExecuteCommandOptions { - /** Effect context containing spawn function */ - readonly ctx: Pick - /** Command to execute */ - readonly command: string - /** Arguments for the command */ - readonly args?: readonly string[] - /** Working directory */ - readonly cwd?: string - /** Environment variables */ - readonly env?: Record - /** Timeout in milliseconds */ - readonly timeout?: number - /** Whether to run in dry-run mode */ - readonly dryRun?: boolean -} - -/** - * Result of command execution. - */ -export interface ExecuteCommandResult { - /** Whether the command succeeded (exit code 0) */ - readonly success: boolean - /** Exit code */ - readonly exitCode: number | null - /** Standard output */ - readonly stdout: string - /** Standard error */ - readonly stderr: string - /** Error if command failed to execute */ - readonly error?: Error -} - -/** - * Execute a shell command as an effect. - * Useful for running build scripts, compilers, etc. 
- * - * @param options - Command execution options - * @returns Result containing output and exit code - */ -export async function executeCommand(options: ExecuteCommandOptions): Promise { - const {ctx, command, args = [], cwd, env, timeout, dryRun = false} = options - const {spawn: spawnFn, logger} = ctx - - if (dryRun) { - logger?.debug({action: 'executeCommand', dryRun: true, command, args}) - return { - success: true, - exitCode: 0, - stdout: '', - stderr: '' - } - } - - return new Promise(resolve => { - const proc = spawnFn(command, [...args], { - cwd, - env: {...process.env, ...env}, - shell: true, - timeout - }) - - let stdout = '' - let stderr = '' - - proc.stdout?.on('data', (data: Buffer) => stdout += data.toString()) - - proc.stderr?.on('data', (data: Buffer) => stderr += data.toString()) - - proc.on('error', error => { - logger?.error({action: 'executeCommand', error: error.message, command}) - resolve({ - success: false, - exitCode: null, - stdout, - stderr, - error - }) - }) - - proc.on('close', code => { - const success = code === 0 - if (success) logger?.debug({action: 'executeCommand', success: true, command}) - else logger?.warn({action: 'executeCommand', success: false, exitCode: code, command, stderr}) - resolve({success, exitCode: code, stdout, stderr}) - }) - }) -} diff --git a/cli/src/utils/RelativePathFactory.ts b/cli/src/utils/RelativePathFactory.ts deleted file mode 100644 index 29d6aae5..00000000 --- a/cli/src/utils/RelativePathFactory.ts +++ /dev/null @@ -1,87 +0,0 @@ -import type {RelativePath} from '../plugins/plugin-shared' -import * as path from 'node:path' -import {FilePathKind} from '../plugins/plugin-shared' - -/** - * Options for creating a RelativePath - */ -export interface CreateRelativePathOptions { - /** The relative path string */ - readonly pathStr: string - /** The base path for resolving absolute paths */ - readonly basePath: string -} - -export function createRelativePath(options: CreateRelativePathOptions): 
RelativePath { - const {pathStr, basePath} = options - return { - pathKind: FilePathKind.Relative, - path: pathStr, - basePath, - getDirectoryName: () => path.dirname(pathStr), - getAbsolutePath: () => path.resolve(basePath, pathStr) - } -} - -/** - * Options for creating a RelativePath with a custom directory name - */ -export interface CreateRelativePathWithDirNameOptions extends CreateRelativePathOptions { - /** Custom directory name to return from getDirectoryName */ - readonly dirName: string -} - -/** - * Create a RelativePath with a custom getDirectoryName implementation. - * Useful when the directory name should be different from path.dirname(pathStr). - * - * @param options - Configuration including custom directory name - * @returns A RelativePath with custom getDirectoryName - */ -export function createRelativePathWithDirName(options: CreateRelativePathWithDirNameOptions): RelativePath { - const {pathStr, basePath, dirName} = options - return { - pathKind: FilePathKind.Relative, - path: pathStr, - basePath, - getDirectoryName: () => dirName, - getAbsolutePath: () => path.resolve(basePath, pathStr) - } -} - -/** - * Create a RelativePath for a file within a directory. - * The getDirectoryName returns the parent directory's name. - * - * @param dir - Parent directory RelativePath - * @param fileName - Name of the file - * @returns A RelativePath pointing to the file - */ -export function createFileRelativePath(dir: RelativePath, fileName: string): RelativePath { - const filePath = path.join(dir.path, fileName) - return { - pathKind: FilePathKind.Relative, - path: filePath, - basePath: dir.basePath, - getDirectoryName: () => dir.getDirectoryName(), - getAbsolutePath: () => path.join(dir.getAbsolutePath(), fileName) - } -} - -/** - * Create a RelativePath for a subdirectory. 
- * - * @param parent - Parent directory RelativePath - * @param subDirName - Name of the subdirectory - * @returns A RelativePath pointing to the subdirectory - */ -export function createSubdirRelativePath(parent: RelativePath, subDirName: string): RelativePath { - const subPath = path.join(parent.path, subDirName) - return { - pathKind: FilePathKind.Relative, - path: subPath, - basePath: parent.basePath, - getDirectoryName: () => subDirName, - getAbsolutePath: () => path.join(parent.getAbsolutePath(), subDirName) - } -} diff --git a/cli/src/utils/ResourceUtils.ts b/cli/src/utils/ResourceUtils.ts deleted file mode 100644 index 94677a01..00000000 --- a/cli/src/utils/ResourceUtils.ts +++ /dev/null @@ -1,242 +0,0 @@ -import type {SkillResourceCategory} from '../plugins/plugin-shared' -import {SKILL_RESOURCE_BINARY_EXTENSIONS} from '../plugins/plugin-shared' - -/** - * Check if a file extension is a binary resource extension. - * - * @param ext - The file extension (including the dot) - * @returns true if the extension is a binary type - */ -export function isBinaryResourceExtension(ext: string): boolean { - return (SKILL_RESOURCE_BINARY_EXTENSIONS as readonly string[]).includes(ext.toLowerCase()) -} - -/** - * Determine the resource category based on file extension. 
- * - * @param ext - The file extension (including the dot) - * @returns The resource category - */ -export function getResourceCategory(ext: string): SkillResourceCategory { - const lowerExt = ext.toLowerCase() - - const imageExtensions = [ // Image files - '.png', - '.jpg', - '.jpeg', - '.gif', - '.webp', - '.ico', - '.bmp', - '.tiff', - '.svg' - ] - if (imageExtensions.includes(lowerExt)) return 'image' - - const codeExtensions = [ // Code files - '.kt', - '.java', - '.py', - '.pyi', - '.pyx', - '.ts', - '.tsx', - '.js', - '.jsx', - '.mjs', - '.cjs', - '.go', - '.rs', - '.c', - '.cpp', - '.cc', - '.h', - '.hpp', - '.hxx', - '.cs', - '.fs', - '.fsx', - '.vb', - '.rb', - '.php', - '.swift', - '.scala', - '.groovy', - '.lua', - '.r', - '.jl', - '.ex', - '.exs', - '.erl', - '.clj', - '.cljs', - '.hs', - '.ml', - '.mli', - '.nim', - '.zig', - '.v', - '.dart', - '.vue', - '.svelte', - '.d.ts', - '.d.mts', - '.d.cts' - ] - if (codeExtensions.includes(lowerExt)) return 'code' - - const dataExtensions = [ // Data files - '.sql', - '.json', - '.jsonc', - '.json5', - '.xml', - '.xsd', - '.xsl', - '.xslt', - '.yaml', - '.yml', - '.toml', - '.csv', - '.tsv', - '.graphql', - '.gql', - '.proto' - ] - if (dataExtensions.includes(lowerExt)) return 'data' - - const documentExtensions = [ // Document files - '.txt', - '.text', - '.rtf', - '.log', - '.docx', - '.doc', - '.xlsx', - '.xls', - '.pptx', - '.ppt', - '.pdf', - '.odt', - '.ods', - '.odp' - ] - if (documentExtensions.includes(lowerExt)) return 'document' - - const configExtensions = [ // Config files - '.ini', - '.conf', - '.cfg', - '.config', - '.properties', - '.env', - '.envrc', - '.editorconfig', - '.gitignore', - '.gitattributes', - '.npmrc', - '.nvmrc', - '.npmignore', - '.eslintrc', - '.prettierrc', - '.stylelintrc', - '.babelrc', - '.browserslistrc' - ] - if (configExtensions.includes(lowerExt)) return 'config' - - const scriptExtensions = [ // Script files - '.sh', - '.bash', - '.zsh', - '.fish', - '.ps1', - 
'.psm1', - '.psd1', - '.bat', - '.cmd' - ] - if (scriptExtensions.includes(lowerExt)) return 'script' - - const binaryExtensions = [ // Binary files - '.exe', - '.dll', - '.so', - '.dylib', - '.bin', - '.wasm', - '.class', - '.jar', - '.war', - '.pyd', - '.pyc', - '.pyo', - '.zip', - '.tar', - '.gz', - '.bz2', - '.7z', - '.rar', - '.ttf', - '.otf', - '.woff', - '.woff2', - '.eot', - '.db', - '.sqlite', - '.sqlite3' - ] - if (binaryExtensions.includes(lowerExt)) return 'binary' - - return 'other' -} - -/** - * Get MIME type for a file extension. - * - * @param ext - The file extension (including the dot) - * @returns The MIME type or void 0 - */ -export function getMimeType(ext: string): string | void { - const mimeTypes: Record = { - '.ts': 'text/typescript', // Code - '.tsx': 'text/typescript', - '.js': 'text/javascript', - '.jsx': 'text/javascript', - '.json': 'application/json', - '.py': 'text/x-python', - '.java': 'text/x-java', - '.kt': 'text/x-kotlin', - '.go': 'text/x-go', - '.rs': 'text/x-rust', - '.c': 'text/x-c', - '.cpp': 'text/x-c++', - '.cs': 'text/x-csharp', - '.rb': 'text/x-ruby', - '.php': 'text/x-php', - '.swift': 'text/x-swift', - '.scala': 'text/x-scala', - '.sql': 'application/sql', // Data - '.xml': 'application/xml', - '.yaml': 'text/yaml', - '.yml': 'text/yaml', - '.toml': 'text/toml', - '.csv': 'text/csv', - '.graphql': 'application/graphql', - '.txt': 'text/plain', // Documents - '.pdf': 'application/pdf', - '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', - '.xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', - '.html': 'text/html', // Web - '.css': 'text/css', - '.svg': 'image/svg+xml', - '.png': 'image/png', // Images - '.jpg': 'image/jpeg', - '.jpeg': 'image/jpeg', - '.gif': 'image/gif', - '.webp': 'image/webp', - '.ico': 'image/x-icon', - '.bmp': 'image/bmp' - } - return mimeTypes[ext.toLowerCase()] -} diff --git a/cli/src/utils/WriteHelper.ts b/cli/src/utils/WriteHelper.ts 
deleted file mode 100644 index 8364b2ec..00000000 --- a/cli/src/utils/WriteHelper.ts +++ /dev/null @@ -1,78 +0,0 @@ -import type {RelativePath} from '../plugins/plugin-shared' -import * as path from 'node:path' -import {createRelativePath} from '../plugins/desk-paths' - -export { - type SafeWriteOptions, - type SafeWriteResult, - writeFileSafe, - type WriteLogger -} from '../plugins/desk-paths' // Re-export from desk-paths - -/** - * Options for creating a RelativePath for output files - */ -export interface OutputPathOptions { - /** Relative path from basePath */ - readonly relativePath: string - /** Base directory for absolute path resolution */ - readonly basePath: string - /** Directory name to return from getDirectoryName() */ - readonly dirName: string -} - -/** - * Create a RelativePath object for output file registration. - * Delegates to desk-paths createRelativePath. - */ -export function createOutputPath(options: OutputPathOptions): RelativePath { - const {relativePath, basePath, dirName} = options - return createRelativePath(relativePath, basePath, () => dirName) -} - -/** - * Create a skill directory path - */ -export function createSkillDirPath(basePath: string, skillsSubDir: string, skillName: string): string { - return path.join(basePath, skillsSubDir, skillName) -} - -/** - * Create a command output path - */ -export function createCommandOutputPath( - globalDir: string, - commandsSubDir: string, - fileName: string -): {relativePath: RelativePath, fullPath: string} { - const fullPath = path.join(globalDir, commandsSubDir, fileName) - return { - fullPath, - relativePath: createOutputPath({ - relativePath: path.join(commandsSubDir, fileName), - basePath: globalDir, - dirName: commandsSubDir - }) - } -} - -/** - * Create a skill file output path - */ -export function createSkillOutputPath( - globalDir: string, - skillsSubDir: string, - skillName: string, - fileName: string -): {relativePath: RelativePath, fullPath: string} { - const skillPath = 
path.join(skillsSubDir, skillName, fileName) - const fullPath = path.join(globalDir, skillPath) - return { - fullPath, - relativePath: createOutputPath({ - relativePath: skillPath, - basePath: globalDir, - dirName: skillName - }) - } -} diff --git a/cli/src/utils/ruleFilter.ts b/cli/src/utils/ruleFilter.ts deleted file mode 100644 index cdaa2ea4..00000000 --- a/cli/src/utils/ruleFilter.ts +++ /dev/null @@ -1,85 +0,0 @@ -import type {ProjectConfig, RulePrompt} from '../plugins/plugin-shared' -import {matchesSeries, resolveEffectiveIncludeSeries, resolveSubSeries} from '../plugins/plugin-output-shared' - -function normalizeSubdirPath(subdir: string): string { - let normalized = subdir.replaceAll(/\.\/+/g, '') - normalized = normalized.replaceAll(/\/+$/g, '') - return normalized -} - -function smartConcatGlob(prefix: string, glob: string): string { - if (glob.startsWith('**/')) return `${prefix}/${glob}` - if (glob.startsWith('*')) return `${prefix}/**/${glob}` - return `${prefix}/${glob}` -} - -function extractPrefixAndBaseGlob(glob: string, prefixes: readonly string[]): {prefix: string | null, baseGlob: string} { - for (const prefix of prefixes) { - const normalizedPrefix = prefix.replaceAll(/\/+$/g, '') - const patterns = [ - {prefix: normalizedPrefix, pattern: `${normalizedPrefix}/`}, - {prefix: normalizedPrefix, pattern: `${normalizedPrefix}\\`} - ] - for (const {prefix: p, pattern} of patterns) { - if (glob.startsWith(pattern)) return {prefix: p, baseGlob: glob.slice(pattern.length)} - } - if (glob === normalizedPrefix) return {prefix: normalizedPrefix, baseGlob: '**/*'} - } - return {prefix: null, baseGlob: glob} -} - -export function applySubSeriesGlobPrefix( - rules: readonly RulePrompt[], - projectConfig: ProjectConfig | undefined -): readonly RulePrompt[] { - const subSeries = resolveSubSeries(projectConfig?.subSeries, projectConfig?.rules?.subSeries) - if (Object.keys(subSeries).length === 0) return rules - - const normalizedSubSeries: Record = {} - for 
(const [subdir, seriNames] of Object.entries(subSeries)) { - const normalizedSubdir = normalizeSubdirPath(subdir) - normalizedSubSeries[normalizedSubdir] = seriNames - } - - const allPrefixes = Object.keys(normalizedSubSeries) - - return rules.map(rule => { - if (rule.seriName == null) return rule - - const matchedPrefixes: string[] = [] - for (const [subdir, seriNames] of Object.entries(normalizedSubSeries)) { - const matched = Array.isArray(rule.seriName) - ? rule.seriName.some(name => seriNames.includes(name)) - : seriNames.includes(rule.seriName) - if (matched) matchedPrefixes.push(subdir) - } - - if (matchedPrefixes.length === 0) return rule - - const newGlobs: string[] = [] - for (const originalGlob of rule.globs) { - const {prefix: existingPrefix, baseGlob} = extractPrefixAndBaseGlob(originalGlob, allPrefixes) - - if (existingPrefix != null) newGlobs.push(originalGlob) - - for (const prefix of matchedPrefixes) { - if (prefix === existingPrefix) continue - const newGlob = smartConcatGlob(prefix, baseGlob) - if (!newGlobs.includes(newGlob)) newGlobs.push(newGlob) - } - } - - return { - ...rule, - globs: newGlobs - } - }) -} - -export function filterRulesByProjectConfig( - rules: readonly RulePrompt[], - projectConfig: ProjectConfig | undefined -): readonly RulePrompt[] { - const effectiveSeries = resolveEffectiveIncludeSeries(projectConfig?.includeSeries, projectConfig?.rules?.includeSeries) - return rules.filter(rule => matchesSeries(rule.seriName, effectiveSeries)) -} diff --git a/cli/src/versionCheck.ts b/cli/src/versionCheck.ts deleted file mode 100644 index fff363eb..00000000 --- a/cli/src/versionCheck.ts +++ /dev/null @@ -1,174 +0,0 @@ -import type {ILogger} from './plugins/plugin-shared' - -/** - * Get package name from build-time injection or fallback - */ -function getPackageName(): string { - return typeof __CLI_PACKAGE_NAME__ !== 'undefined' ? 
__CLI_PACKAGE_NAME__ : '@truenine/memory-sync-cli' -} - -/** - * Get npm registry URL for the package - */ -function getNpmRegistryUrl(): string { - return `https://registry.npmjs.org/${getPackageName()}/latest` -} - -/** - * Version comparison result - */ -export type VersionStatus = 'outdated' | 'current' | 'development' - -export interface VersionCheckResult { - readonly status: VersionStatus - readonly localVersion: string - readonly remoteVersion: string | null - readonly error?: string -} - -/** - * Parse semver version string into numeric components - * Returns [major, minor, patch] or null if invalid - */ -export function parseVersion(version: string): [number, number, number] | null { - const cleaned = version.replace(/^v/, '') // Remove leading 'v' if present - const match = /^(\d+)\.(\d+)\.(\d+)/.exec(cleaned) - if (match == null) return null - return [ - Number.parseInt(match[1]!, 10), - Number.parseInt(match[2]!, 10), - Number.parseInt(match[3]!, 10) - ] -} - -export function compareVersions(a: string, b: string): -1 | 0 | 1 { - const parsedA = parseVersion(a) - const parsedB = parseVersion(b) - - if (parsedA == null || parsedB == null) return 0 - - for (let i = 0; i < 3; i++) { - if (parsedA[i]! < parsedB[i]!) return -1 - if (parsedA[i]! > parsedB[i]!) 
return 1 - } - return 0 -} - -/** - * Timeout duration for fetching version (3 seconds) - */ -const FETCH_TIMEOUT_MS = 3000 - -/** - * Fetch latest version from npm registry - * Returns version string on success, or error message on failure - * Uses unref() on timeout to prevent blocking process exit - */ -export async function fetchLatestVersion(): Promise<{version: string} | {error: string}> { - const controller = new AbortController() - const timeoutId = setTimeout(() => controller.abort(), FETCH_TIMEOUT_MS) - if (typeof timeoutId === 'object' && 'unref' in timeoutId) timeoutId.unref() // Unref the timeout so it doesn't prevent process exit - - try { - const response = await fetch(getNpmRegistryUrl(), { - headers: {Accept: 'application/json'}, - signal: controller.signal - }) - clearTimeout(timeoutId) - - if (!response.ok) return {error: `HTTP ${response.status}: ${response.statusText}`} - const data = await response.json() as {version?: string} - if (data.version == null) return {error: 'Invalid response: missing version field'} - return {version: data.version} - } - catch (err) { - clearTimeout(timeoutId) - if (err instanceof Error) { - if (err.name === 'TimeoutError' || err.name === 'AbortError') return {error: `Request timeout after ${FETCH_TIMEOUT_MS}ms`} - return {error: err.message} - } - return {error: 'Unknown network error'} - } -} - -/** - * Get local CLI version - */ -export function getLocalVersion(): string { - return typeof __CLI_VERSION__ !== 'undefined' ? 
__CLI_VERSION__ : 'dev' -} - -/** - * Check if current version is outdated compared to npm registry - */ -export async function checkVersion(): Promise { - const localVersion = getLocalVersion() - - if (localVersion === 'dev') { // Development version, skip check - return { - status: 'development', - localVersion, - remoteVersion: null - } - } - - const fetchResult = await fetchLatestVersion() - - if ('error' in fetchResult) { - return { - status: 'current', - localVersion, - remoteVersion: null, - error: fetchResult.error - } - } - - const remoteVersion = fetchResult.version - const comparison = compareVersions(localVersion, remoteVersion) - - if (comparison < 0) return {status: 'outdated', localVersion, remoteVersion} - if (comparison > 0) return {status: 'development', localVersion, remoteVersion} - return {status: 'current', localVersion, remoteVersion} -} - -/** - * Log version check result - */ -export function logVersionCheckResult(result: VersionCheckResult, logger: ILogger): void { - const {status, localVersion, remoteVersion} = result - - switch (status) { - case 'outdated': logger.warn(`Version outdated: ${localVersion} → ${remoteVersion}. Run 'npm i -g ${getPackageName()}@latest' to update.`); break - case 'current': - if (result.error != null) logger.warn(`Version check skipped: ${result.error}`) - else logger.info(`Version ${localVersion} is up to date.`) - break - case 'development': - if (remoteVersion != null) logger.info(`Development version detected: ${localVersion} > ${remoteVersion}. 
Thanks for contributing!`) - else logger.debug('Running in development mode, version check skipped.') - break - } -} - -export function shouldCheckVersion(): boolean { - const minute = new Date().getMinutes() - return minute % 2 === 0 -} - -/** - * Perform version check on CLI startup if conditions are met - * This function is designed to not block process exit - it catches all errors - * and uses unref'd timers to ensure the process can exit normally - */ -export function startupVersionCheck(logger: ILogger): void { - if (!shouldCheckVersion()) return - - checkVersion() // The promise is intentionally not awaited // Run version check in background without blocking process exit - .then(result => { - if (result.status === 'outdated' || result.error != null) logVersionCheckResult(result, logger) // Log warnings for outdated versions or errors on startup - }) - .catch((err: unknown) => { - const message = err instanceof Error ? err.message : 'Unknown error' - logger.warn(`Version check skipped: ${message}`) - }) -} diff --git a/cli/tsconfig.json b/cli/tsconfig.json index 9f0bb669..2dd219c8 100644 --- a/cli/tsconfig.json +++ b/cli/tsconfig.json @@ -17,31 +17,31 @@ "@/*": [ "./src/*" ], - "@truenine/desk-paths": ["./src/plugins/desk-paths/index.ts"], + "@truenine/desk-paths": ["./src/plugins/desk-paths.ts"], "@truenine/desk-paths/*": ["./src/plugins/desk-paths/*"], "@truenine/plugin-output-shared": ["./src/plugins/plugin-output-shared/index.ts"], "@truenine/plugin-output-shared/*": ["./src/plugins/plugin-output-shared/*"], "@truenine/plugin-input-shared": ["./src/plugins/plugin-input-shared/index.ts"], "@truenine/plugin-input-shared/*": ["./src/plugins/plugin-input-shared/*"], - "@truenine/plugin-agentskills-compact": ["./src/plugins/plugin-agentskills-compact/index.ts"], - "@truenine/plugin-agentsmd": ["./src/plugins/plugin-agentsmd/index.ts"], + "@truenine/plugin-agentskills-compact": ["./src/plugins/plugin-agentskills-compact.ts"], + "@truenine/plugin-agentsmd": 
["./src/plugins/plugin-agentsmd.ts"], "@truenine/plugin-antigravity": ["./src/plugins/plugin-antigravity/index.ts"], - "@truenine/plugin-claude-code-cli": ["./src/plugins/plugin-claude-code-cli/index.ts"], - "@truenine/plugin-cursor": ["./src/plugins/plugin-cursor/index.ts"], - "@truenine/plugin-droid-cli": ["./src/plugins/plugin-droid-cli/index.ts"], - "@truenine/plugin-editorconfig": ["./src/plugins/plugin-editorconfig/index.ts"], - "@truenine/plugin-gemini-cli": ["./src/plugins/plugin-gemini-cli/index.ts"], - "@truenine/plugin-git-exclude": ["./src/plugins/plugin-git-exclude/index.ts"], - "@truenine/plugin-jetbrains-ai-codex": ["./src/plugins/plugin-jetbrains-ai-codex/index.ts"], - "@truenine/plugin-jetbrains-codestyle": ["./src/plugins/plugin-jetbrains-codestyle/index.ts"], - "@truenine/plugin-openai-codex-cli": ["./src/plugins/plugin-openai-codex-cli/index.ts"], - "@truenine/plugin-opencode-cli": ["./src/plugins/plugin-opencode-cli/index.ts"], - "@truenine/plugin-qoder-ide": ["./src/plugins/plugin-qoder-ide/index.ts"], - "@truenine/plugin-readme": ["./src/plugins/plugin-readme/index.ts"], - "@truenine/plugin-trae-ide": ["./src/plugins/plugin-trae-ide/index.ts"], - "@truenine/plugin-vscode": ["./src/plugins/plugin-vscode/index.ts"], - "@truenine/plugin-warp-ide": ["./src/plugins/plugin-warp-ide/index.ts"], - "@truenine/plugin-windsurf": ["./src/plugins/plugin-windsurf/index.ts"] + "@truenine/plugin-claude-code-cli": ["./src/plugins/plugin-claude-code-cli.ts"], + "@truenine/plugin-cursor": ["./src/plugins/plugin-cursor.ts"], + "@truenine/plugin-droid-cli": ["./src/plugins/plugin-droid-cli.ts"], + "@truenine/plugin-editorconfig": ["./src/plugins/plugin-editorconfig.ts"], + "@truenine/plugin-gemini-cli": ["./src/plugins/plugin-gemini-cli.ts"], + "@truenine/plugin-git-exclude": ["./src/plugins/plugin-git-exclude.ts"], + "@truenine/plugin-jetbrains-ai-codex": ["./src/plugins/plugin-jetbrains-ai-codex.ts"], + "@truenine/plugin-jetbrains-codestyle": 
["./src/plugins/plugin-jetbrains-codestyle.ts"], + "@truenine/plugin-openai-codex-cli": ["./src/plugins/plugin-openai-codex-cli.ts"], + "@truenine/plugin-opencode-cli": ["./src/plugins/plugin-opencode-cli.ts"], + "@truenine/plugin-qoder-ide": ["./src/plugins/plugin-qoder-ide.ts"], + "@truenine/plugin-readme": ["./src/plugins/plugin-readme.ts"], + "@truenine/plugin-trae-ide": ["./src/plugins/plugin-trae-ide.ts"], + "@truenine/plugin-vscode": ["./src/plugins/plugin-vscode.ts"], + "@truenine/plugin-warp-ide": ["./src/plugins/plugin-warp-ide.ts"], + "@truenine/plugin-windsurf": ["./src/plugins/plugin-windsurf.ts"] }, "resolveJsonModule": true, "allowImportingTsExtensions": true, diff --git a/cli/tsdown.config.ts b/cli/tsdown.config.ts index 0a27ce53..44ee7cd9 100644 --- a/cli/tsdown.config.ts +++ b/cli/tsdown.config.ts @@ -6,18 +6,18 @@ const pkg = JSON.parse(readFileSync('./package.json', 'utf8')) as {version: stri const kiroGlobalPowersRegistry = '{"version":"1.0.0","powers":{},"repoSources":{}}' const pluginAliases: Record = { - '@truenine/desk-paths': resolve('src/plugins/desk-paths/index.ts'), + '@truenine/desk-paths': resolve('src/plugins/desk-paths.ts'), '@truenine/plugin-output-shared': resolve('src/plugins/plugin-output-shared/index.ts'), '@truenine/plugin-input-shared': resolve('src/plugins/plugin-input-shared/index.ts'), - '@truenine/plugin-agentskills-compact': resolve('src/plugins/plugin-agentskills-compact/index.ts'), - '@truenine/plugin-agentsmd': resolve('src/plugins/plugin-agentsmd/index.ts'), + '@truenine/plugin-agentskills-compact': resolve('src/plugins/plugin-agentskills-compact.ts'), + '@truenine/plugin-agentsmd': resolve('src/plugins/plugin-agentsmd.ts'), '@truenine/plugin-antigravity': resolve('src/plugins/plugin-antigravity/index.ts'), - '@truenine/plugin-claude-code-cli': resolve('src/plugins/plugin-claude-code-cli/index.ts'), - '@truenine/plugin-cursor': resolve('src/plugins/plugin-cursor/index.ts'), - '@truenine/plugin-droid-cli': 
resolve('src/plugins/plugin-droid-cli/index.ts'), - '@truenine/plugin-editorconfig': resolve('src/plugins/plugin-editorconfig/index.ts'), - '@truenine/plugin-gemini-cli': resolve('src/plugins/plugin-gemini-cli/index.ts'), - '@truenine/plugin-git-exclude': resolve('src/plugins/plugin-git-exclude/index.ts'), + '@truenine/plugin-claude-code-cli': resolve('src/plugins/plugin-claude-code-cli.ts'), + '@truenine/plugin-cursor': resolve('src/plugins/plugin-cursor.ts'), + '@truenine/plugin-droid-cli': resolve('src/plugins/plugin-droid-cli.ts'), + '@truenine/plugin-editorconfig': resolve('src/plugins/plugin-editorconfig.ts'), + '@truenine/plugin-gemini-cli': resolve('src/plugins/plugin-gemini-cli.ts'), + '@truenine/plugin-git-exclude': resolve('src/plugins/plugin-git-exclude.ts'), '@truenine/plugin-input-agentskills': resolve('src/plugins/plugin-input-agentskills/index.ts'), '@truenine/plugin-input-editorconfig': resolve('src/plugins/plugin-input-editorconfig/index.ts'), '@truenine/plugin-input-fast-command': resolve('src/plugins/plugin-input-fast-command/index.ts'), @@ -36,20 +36,21 @@ const pluginAliases: Record = { '@truenine/plugin-input-subagent': resolve('src/plugins/plugin-input-subagent/index.ts'), '@truenine/plugin-input-vscode-config': resolve('src/plugins/plugin-input-vscode-config/index.ts'), '@truenine/plugin-input-workspace': resolve('src/plugins/plugin-input-workspace/index.ts'), - '@truenine/plugin-jetbrains-ai-codex': resolve('src/plugins/plugin-jetbrains-ai-codex/index.ts'), - '@truenine/plugin-jetbrains-codestyle': resolve('src/plugins/plugin-jetbrains-codestyle/index.ts'), - '@truenine/plugin-openai-codex-cli': resolve('src/plugins/plugin-openai-codex-cli/index.ts'), - '@truenine/plugin-opencode-cli': resolve('src/plugins/plugin-opencode-cli/index.ts'), - '@truenine/plugin-qoder-ide': resolve('src/plugins/plugin-qoder-ide/index.ts'), - '@truenine/plugin-readme': resolve('src/plugins/plugin-readme/index.ts'), - '@truenine/plugin-trae-ide': 
resolve('src/plugins/plugin-trae-ide/index.ts'), - '@truenine/plugin-vscode': resolve('src/plugins/plugin-vscode/index.ts'), - '@truenine/plugin-warp-ide': resolve('src/plugins/plugin-warp-ide/index.ts'), - '@truenine/plugin-windsurf': resolve('src/plugins/plugin-windsurf/index.ts') + '@truenine/plugin-jetbrains-ai-codex': resolve('src/plugins/plugin-jetbrains-ai-codex.ts'), + '@truenine/plugin-jetbrains-codestyle': resolve('src/plugins/plugin-jetbrains-codestyle.ts'), + '@truenine/plugin-openai-codex-cli': resolve('src/plugins/plugin-openai-codex-cli.ts'), + '@truenine/plugin-opencode-cli': resolve('src/plugins/plugin-opencode-cli.ts'), + '@truenine/plugin-qoder-ide': resolve('src/plugins/plugin-qoder-ide.ts'), + '@truenine/plugin-readme': resolve('src/plugins/plugin-readme.ts'), + '@truenine/plugin-trae-ide': resolve('src/plugins/plugin-trae-ide.ts'), + '@truenine/plugin-vscode': resolve('src/plugins/plugin-vscode.ts'), + '@truenine/plugin-warp-ide': resolve('src/plugins/plugin-warp-ide.ts'), + '@truenine/plugin-windsurf': resolve('src/plugins/plugin-windsurf.ts') } const noExternalDeps = [ '@truenine/logger', + '@truenine/script-runtime', 'fast-glob', '@truenine/desk-paths', '@truenine/md-compiler', @@ -98,6 +99,26 @@ export default defineConfig([ __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry } }, + { + entry: ['./src/script-runtime-worker.ts'], + platform: 'node', + sourcemap: false, + unbundle: false, + inlineOnly: false, + alias: { + '@': resolve('src'), + ...pluginAliases + }, + noExternal: noExternalDeps, + format: ['esm'], + minify: false, + dts: false, + define: { + __CLI_VERSION__: JSON.stringify(pkg.version), + __CLI_PACKAGE_NAME__: JSON.stringify(pkg.name), + __KIRO_GLOBAL_POWERS_REGISTRY__: kiroGlobalPowersRegistry + } + }, { entry: ['./src/globals.ts'], platform: 'node', diff --git a/cli/vite.config.ts b/cli/vite.config.ts index ecfd1c94..93f0d4fa 100644 --- a/cli/vite.config.ts +++ b/cli/vite.config.ts @@ -7,21 +7,21 @@ const pkg = 
JSON.parse(readFileSync('./package.json', 'utf8')) as {version: stri const kiroGlobalPowersRegistry = '{"version":"1.0.0","powers":{},"repoSources":{}}' const pluginAliases: Record = { - '@truenine/desk-paths': resolve('src/plugins/desk-paths/index.ts'), + '@truenine/desk-paths': resolve('src/plugins/desk-paths.ts'), '@truenine/plugin-output-shared': resolve('src/plugins/plugin-output-shared/index.ts'), '@truenine/plugin-output-shared/utils': resolve('src/plugins/plugin-output-shared/utils/index.ts'), '@truenine/plugin-output-shared/registry': resolve('src/plugins/plugin-output-shared/registry/index.ts'), '@truenine/plugin-input-shared': resolve('src/plugins/plugin-input-shared/index.ts'), '@truenine/plugin-input-shared/scope': resolve('src/plugins/plugin-input-shared/scope/index.ts'), - '@truenine/plugin-agentskills-compact': resolve('src/plugins/plugin-agentskills-compact/index.ts'), - '@truenine/plugin-agentsmd': resolve('src/plugins/plugin-agentsmd/index.ts'), + '@truenine/plugin-agentskills-compact': resolve('src/plugins/plugin-agentskills-compact.ts'), + '@truenine/plugin-agentsmd': resolve('src/plugins/plugin-agentsmd.ts'), '@truenine/plugin-antigravity': resolve('src/plugins/plugin-antigravity/index.ts'), - '@truenine/plugin-claude-code-cli': resolve('src/plugins/plugin-claude-code-cli/index.ts'), - '@truenine/plugin-cursor': resolve('src/plugins/plugin-cursor/index.ts'), - '@truenine/plugin-droid-cli': resolve('src/plugins/plugin-droid-cli/index.ts'), - '@truenine/plugin-editorconfig': resolve('src/plugins/plugin-editorconfig/index.ts'), - '@truenine/plugin-gemini-cli': resolve('src/plugins/plugin-gemini-cli/index.ts'), - '@truenine/plugin-git-exclude': resolve('src/plugins/plugin-git-exclude/index.ts'), + '@truenine/plugin-claude-code-cli': resolve('src/plugins/plugin-claude-code-cli.ts'), + '@truenine/plugin-cursor': resolve('src/plugins/plugin-cursor.ts'), + '@truenine/plugin-droid-cli': resolve('src/plugins/plugin-droid-cli.ts'), + 
'@truenine/plugin-editorconfig': resolve('src/plugins/plugin-editorconfig.ts'), + '@truenine/plugin-gemini-cli': resolve('src/plugins/plugin-gemini-cli.ts'), + '@truenine/plugin-git-exclude': resolve('src/plugins/plugin-git-exclude.ts'), '@truenine/plugin-input-agentskills': resolve('src/plugins/plugin-input-agentskills/index.ts'), '@truenine/plugin-input-editorconfig': resolve('src/plugins/plugin-input-editorconfig/index.ts'), '@truenine/plugin-input-fast-command': resolve('src/plugins/plugin-input-fast-command/index.ts'), @@ -40,16 +40,16 @@ const pluginAliases: Record = { '@truenine/plugin-input-subagent': resolve('src/plugins/plugin-input-subagent/index.ts'), '@truenine/plugin-input-vscode-config': resolve('src/plugins/plugin-input-vscode-config/index.ts'), '@truenine/plugin-input-workspace': resolve('src/plugins/plugin-input-workspace/index.ts'), - '@truenine/plugin-jetbrains-ai-codex': resolve('src/plugins/plugin-jetbrains-ai-codex/index.ts'), - '@truenine/plugin-jetbrains-codestyle': resolve('src/plugins/plugin-jetbrains-codestyle/index.ts'), - '@truenine/plugin-openai-codex-cli': resolve('src/plugins/plugin-openai-codex-cli/index.ts'), - '@truenine/plugin-opencode-cli': resolve('src/plugins/plugin-opencode-cli/index.ts'), - '@truenine/plugin-qoder-ide': resolve('src/plugins/plugin-qoder-ide/index.ts'), - '@truenine/plugin-readme': resolve('src/plugins/plugin-readme/index.ts'), - '@truenine/plugin-trae-ide': resolve('src/plugins/plugin-trae-ide/index.ts'), - '@truenine/plugin-vscode': resolve('src/plugins/plugin-vscode/index.ts'), - '@truenine/plugin-warp-ide': resolve('src/plugins/plugin-warp-ide/index.ts'), - '@truenine/plugin-windsurf': resolve('src/plugins/plugin-windsurf/index.ts') + '@truenine/plugin-jetbrains-ai-codex': resolve('src/plugins/plugin-jetbrains-ai-codex.ts'), + '@truenine/plugin-jetbrains-codestyle': resolve('src/plugins/plugin-jetbrains-codestyle.ts'), + '@truenine/plugin-openai-codex-cli': 
resolve('src/plugins/plugin-openai-codex-cli.ts'), + '@truenine/plugin-opencode-cli': resolve('src/plugins/plugin-opencode-cli.ts'), + '@truenine/plugin-qoder-ide': resolve('src/plugins/plugin-qoder-ide.ts'), + '@truenine/plugin-readme': resolve('src/plugins/plugin-readme.ts'), + '@truenine/plugin-trae-ide': resolve('src/plugins/plugin-trae-ide.ts'), + '@truenine/plugin-vscode': resolve('src/plugins/plugin-vscode.ts'), + '@truenine/plugin-warp-ide': resolve('src/plugins/plugin-warp-ide.ts'), + '@truenine/plugin-windsurf': resolve('src/plugins/plugin-windsurf.ts') } export default defineConfig({ diff --git a/doc/app/config.mdx b/doc/app/config.mdx index 809b1025..03cc91c8 100644 --- a/doc/app/config.mdx +++ b/doc/app/config.mdx @@ -44,11 +44,11 @@ lastUpdated: 2026-02-18 "subAgent": { "src": "src/agents", "dist": "dist/agents" }, "rule": { "src": "src/rules", "dist": "dist/rules" }, "globalMemory": { - "src": "app/global.cn.mdx", + "src": "app/global.src.mdx", "dist": "dist/global.mdx" }, "workspaceMemory": { - "src": "app/workspace.cn.mdx", + "src": "app/workspace.src.mdx", "dist": "dist/app/workspace.mdx" }, "project": { "src": "app", "dist": "dist/app" } diff --git a/doc/app/getting-started.mdx b/doc/app/getting-started.mdx index 7a2f4a82..83636dcf 100644 --- a/doc/app/getting-started.mdx +++ b/doc/app/getting-started.mdx @@ -46,7 +46,7 @@ import { Steps, Step } from './components/Steps'; 执行 tnmsc init(或未来提供的类似命令),生成示例配置与 Prompt 文件。 - 编辑 tnmsc.example.jsonapp/global.cn.mdx,根据自己的工作流调整内容。 + 编辑 tnmsc.example.jsonapp/global.src.mdx,根据自己的工作流调整内容。 diff --git a/doc/app/page.mdx b/doc/app/page.mdx index c4e3e1c0..6599925a 100644 --- a/doc/app/page.mdx +++ b/doc/app/page.mdx @@ -42,7 +42,7 @@ export const metadata = {
- 你可以把现有的 tnmsc.example.jsonglobal.cn.mdx{' '} + 你可以把现有的 tnmsc.example.jsonglobal.src.mdx{' '} 等内容按模块拆分成多篇文档,放在 app{' '} 目录中对应的路由(例如 /getting-started/cli, /gui)。
diff --git a/doc/package.json b/doc/package.json index b68ee1d4..dd13f992 100644 --- a/doc/package.json +++ b/doc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-docs", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "private": true, "description": "Documentation site for @truenine/memory-sync, built with Next.js 16 and MDX.", "engines": { diff --git a/gui/package.json b/gui/package.json index 2201098d..3bc82736 100644 --- a/gui/package.json +++ b/gui/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-gui", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "private": true, "engines": { "node": ">=25.2.1", @@ -11,7 +11,7 @@ "scripts": { "dev": "vite", "build": "pnpm run generate:icons && vite build --emptyOutDir && tsc --noEmit", - "generate:icons": "tauri icon src-tauri/icons/icon-source.png -o src-tauri/icons", + "generate:icons": "tsx scripts/generate-icons.ts", "preview": "vite preview", "tauri": "tauri", "tauri:dev": "tauri dev", diff --git a/gui/scripts/generate-icons.ts b/gui/scripts/generate-icons.ts new file mode 100644 index 00000000..c408eb76 --- /dev/null +++ b/gui/scripts/generate-icons.ts @@ -0,0 +1,27 @@ +import { execSync } from 'node:child_process' +import { dirname, join } from 'node:path' +import { fileURLToPath } from 'node:url' + +const __dirname = dirname(fileURLToPath(import.meta.url)) +const rootDir = join(__dirname, '..') +const iconsDir = join(rootDir, 'src-tauri', 'icons') +const sourceIcon = join(iconsDir, 'icon-source.png') + +async function main() { + try { + execSync( + `tauri icon "${sourceIcon}" -o "${iconsDir}"`, + { + cwd: rootDir, + stdio: 'pipe', // 抑制输出 + encoding: 'utf-8', + } + ) + console.log('✓ Icons generated successfully') + } catch (error) { + console.error('✗ Failed to generate icons') + process.exit(1) + } +} + +main() diff --git a/gui/src-tauri/Cargo.toml b/gui/src-tauri/Cargo.toml index 10a0c4fd..a01f5651 100644 --- a/gui/src-tauri/Cargo.toml +++ 
b/gui/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "memory-sync-gui" -version = "2026.10303.11117" +version = "2026.10314.10606" description = "Memory Sync desktop GUI application" authors.workspace = true edition.workspace = true diff --git a/gui/src-tauri/src/commands.rs b/gui/src-tauri/src/commands.rs index 02ea0c07..fb109fb5 100644 --- a/gui/src-tauri/src/commands.rs +++ b/gui/src-tauri/src/commands.rs @@ -5,11 +5,32 @@ /// internally via `tnmsc::run_bridge_command`, but the GUI no longer searches for or /// invokes the CLI binary as a sidecar. -use std::path::Path; +use std::path::{Path, PathBuf}; use std::process::Command as StdCommand; use serde::{Deserialize, Serialize}; +const PRIMARY_SOURCE_MDX_EXTENSION: &str = ".src.mdx"; +const SOURCE_MDX_FILE_TYPE: &str = "sourceMdx"; +const DEFAULT_AINDEX_DIR: &str = "aindex"; +const DEFAULT_SKILLS_SRC_DIR: &str = "skills"; +const DEFAULT_SKILLS_DIST_DIR: &str = "dist/skills"; +const DEFAULT_COMMANDS_SRC_DIR: &str = "commands"; +const DEFAULT_COMMANDS_DIST_DIR: &str = "dist/commands"; +const DEFAULT_SUB_AGENTS_SRC_DIR: &str = "subagents"; +const DEFAULT_SUB_AGENTS_DIST_DIR: &str = "dist/subagents"; +const DEFAULT_RULES_SRC_DIR: &str = "rules"; +const DEFAULT_RULES_DIST_DIR: &str = "dist/rules"; + +fn has_source_mdx_extension(name: &str) -> bool { + name.ends_with(PRIMARY_SOURCE_MDX_EXTENSION) +} + +fn replace_source_mdx_extension(path: &str) -> Option { + path.strip_suffix(PRIMARY_SOURCE_MDX_EXTENSION) + .map(|without_extension| format!("{without_extension}.mdx")) +} + // --------------------------------------------------------------------------- // Data structures // --------------------------------------------------------------------------- @@ -210,82 +231,122 @@ pub fn open_config_dir() -> Result { #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct AindexFileEntry { - /// Relative path from aindex root, e.g. 
"app/TrueNine/agt.cn.mdx" + /// Relative path from aindex root, e.g. "app/TrueNine/agt.src.mdx" pub source_path: String, /// Relative path of translated file (empty for resource files) pub translated_path: String, /// Whether the translated file exists on disk pub translated_exists: bool, - /// "cnMdx" for .cn.mdx source+translated pairs, "resource" for other files + /// "sourceMdx" for source+translated pairs, "resource" for other files pub file_type: String, } /// Parsed global config with resolved paths. struct ResolvedConfig { - shadow_source_project: String, - cfg: serde_json::Value, + aindex_root: PathBuf, + config: tnmsc::core::config::UserConfigFile, +} + +struct CategoryPaths { + source_rel: String, + translated_rel: String, } -/// Read and resolve the global config. -fn load_resolved_config() -> Result { - let config_path = { - let home = dirs::home_dir().ok_or("Cannot determine home directory")?; - home.join(".aindex").join(".tnmsc.json") +fn resolve_category_paths( + config: &tnmsc::core::config::UserConfigFile, + category: &str, +) -> Result { + let aindex = config.aindex.as_ref(); + + let resolve_pair = | + pair: Option<&tnmsc::core::config::DirPair>, + default_source: &str, + default_translated: &str, + | -> CategoryPaths { + CategoryPaths { + source_rel: pair + .and_then(|value| value.src.as_deref()) + .unwrap_or(default_source) + .to_string(), + translated_rel: pair + .and_then(|value| value.dist.as_deref()) + .unwrap_or(default_translated) + .to_string(), + } }; - if !config_path.exists() { - return Err("Global config not found.".into()); + + match category { + "skills" => Ok(resolve_pair( + aindex.and_then(|value| value.skills.as_ref()), + DEFAULT_SKILLS_SRC_DIR, + DEFAULT_SKILLS_DIST_DIR, + )), + "commands" => Ok(resolve_pair( + aindex.and_then(|value| value.commands.as_ref()), + DEFAULT_COMMANDS_SRC_DIR, + DEFAULT_COMMANDS_DIST_DIR, + )), + "agents" => Ok(resolve_pair( + aindex.and_then(|value| value.sub_agents.as_ref()), + 
DEFAULT_SUB_AGENTS_SRC_DIR, + DEFAULT_SUB_AGENTS_DIST_DIR, + )), + "rules" => Ok(resolve_pair( + aindex.and_then(|value| value.rules.as_ref()), + DEFAULT_RULES_SRC_DIR, + DEFAULT_RULES_DIST_DIR, + )), + _ => Err(format!("Unknown category: {category}")), } - let raw = std::fs::read_to_string(&config_path) - .map_err(|e| format!("Failed to read config: {e}"))?; - let cfg: serde_json::Value = serde_json::from_str(&raw) - .map_err(|e| format!("Failed to parse config: {e}"))?; - - let workspace_raw = cfg.get("workspaceDir") - .and_then(|v| v.as_str()) - .unwrap_or("."); - let home = dirs::home_dir() - .map(|h| h.to_string_lossy().to_string()) - .unwrap_or_default(); - let workspace = workspace_raw.replace('~', &home); - - let shadow_name = cfg - .get("shadowSourceProject") - .and_then(|v| v.get("name")) - .and_then(|v| v.as_str()) - .unwrap_or("tnmsc-shadow"); - let shadow_source_project = format!("{workspace}/{shadow_name}"); - - Ok(ResolvedConfig { shadow_source_project, cfg }) -} - - -/// Read the global config and resolve the shadowSourceProjectDir path. -fn resolve_aindex_root() -> Result { - let rc = load_resolved_config()?; - let path = std::path::PathBuf::from(&rc.shadow_source_project); +} + +/// Read and resolve the merged tnmsc config for the current working directory. +fn load_resolved_config(cwd: &str) -> Result { + let result = tnmsc::load_config(Path::new(cwd)) + .map_err(|e| format!("Failed to load config: {e}"))?; + let config = result.config; + let workspace_dir = config.workspace_dir.as_deref().unwrap_or("."); + let workspace_dir = tnmsc::core::config::resolve_tilde(workspace_dir); + let aindex_dir = config + .aindex + .as_ref() + .and_then(|value| value.dir.as_deref()) + .unwrap_or(DEFAULT_AINDEX_DIR); + + Ok(ResolvedConfig { + aindex_root: workspace_dir.join(aindex_dir), + config, + }) +} + + +/// Read the merged config and resolve the aindex root path. 
+fn resolve_aindex_root(cwd: &str) -> Result { + let rc = load_resolved_config(cwd)?; + let path = rc.aindex_root; if !path.exists() { return Err(format!("Aindex directory not found: {}", path.display())); } Ok(path) } -/// Recursively collect all `.cn.mdx` source files under `aindex/app/`. +/// Recursively collect all source prompt files under `aindex/app/`. #[tauri::command] -pub fn list_aindex_files(_cwd: String) -> Result, String> { - let base = resolve_aindex_root()?; +pub fn list_aindex_files(cwd: String) -> Result, String> { + let base = resolve_aindex_root(&cwd)?; let app_dir = base.join("app"); if !app_dir.exists() { return Ok(vec![]); } let mut entries = Vec::new(); - collect_cn_mdx(&app_dir, &base, &mut entries) + collect_source_mdx(&app_dir, &base, &mut entries) .map_err(|e| format!("Failed to scan aindex: {e}"))?; entries.sort_by(|a, b| a.source_path.cmp(&b.source_path)); Ok(entries) } -fn collect_cn_mdx( +fn collect_source_mdx( dir: &std::path::Path, base: &std::path::Path, out: &mut Vec, @@ -294,15 +355,16 @@ fn collect_cn_mdx( let entry = entry?; let path = entry.path(); if path.is_dir() { - collect_cn_mdx(&path, base, out)?; + collect_source_mdx(&path, base, out)?; } else if let Some(name) = path.file_name().and_then(|n| n.to_str()) { - if name.ends_with(".cn.mdx") { + if has_source_mdx_extension(name) { let rel = path.strip_prefix(base).unwrap_or(&path); let source_path = rel.to_string_lossy().replace('\\', "/"); // Determine translated path: - // - app/global.cn.mdx -> dist/global.mdx (root-level files) - // - app/X/foo.cn.mdx -> dist/app/X/foo.mdx (subdirectory files) - let without_ext = source_path.replace(".cn.mdx", ".mdx"); + // - app/global.src.mdx -> dist/global.mdx (root-level files) + // - app/X/foo.src.mdx -> dist/app/X/foo.mdx (subdirectory files) + let without_ext = replace_source_mdx_extension(&source_path) + .unwrap_or_else(|| source_path.clone()); let translated_rel = if without_ext.starts_with("app/") { let after_app = 
&without_ext["app/".len()..]; if after_app.contains('/') { @@ -320,7 +382,7 @@ fn collect_cn_mdx( source_path, translated_path: translated_rel, translated_exists: translated_abs.exists(), - file_type: "cnMdx".to_string(), + file_type: SOURCE_MDX_FILE_TYPE.to_string(), }); } } @@ -330,8 +392,8 @@ fn collect_cn_mdx( /// Read a file relative to the aindex directory (resolved from config). #[tauri::command] -pub fn read_aindex_file(_cwd: String, rel_path: String) -> Result { - let base = resolve_aindex_root()?; +pub fn read_aindex_file(cwd: String, rel_path: String) -> Result { + let base = resolve_aindex_root(&cwd)?; let path = base.join(&rel_path); if !path.exists() { return Ok(String::new()); @@ -342,8 +404,8 @@ pub fn read_aindex_file(_cwd: String, rel_path: String) -> Result Result<(), String> { - let base = resolve_aindex_root()?; +pub fn write_aindex_file(cwd: String, rel_path: String, content: String) -> Result<(), String> { + let base = resolve_aindex_root(&cwd)?; let path = base.join(&rel_path); if let Some(parent) = path.parent() { std::fs::create_dir_all(parent) @@ -353,46 +415,14 @@ pub fn write_aindex_file(_cwd: String, rel_path: String, content: String) -> Res .map_err(|e| format!("Failed to write {}: {e}", path.display())) } -/// List `.cn.mdx` source files for a given category (skills, commands, agents). -/// Reads the corresponding config field to resolve the source directory, -/// then maps translated files to `dist/{category}/`. +/// List source prompt files for a given category (skills, commands, agents). +/// Reads the corresponding `aindex` config field to resolve source and output directories. 
#[tauri::command] -pub fn list_category_files(_cwd: String, category: String) -> Result, String> { - let rc = load_resolved_config()?; - let base = std::path::PathBuf::from(&rc.shadow_source_project); - - // Map category name to the dist subpath key within shadowSourceProject - let (src_key, dist_key) = match category.as_str() { - "skills" => ("skill", "skill"), - "commands" => ("fastCommand", "fastCommand"), - "agents" => ("subAgent", "subAgent"), - "rules" => ("rule", "rule"), - _ => return Err(format!("Unknown category: {category}")), - }; - - let ssp = rc.cfg.get("shadowSourceProject"); - - // Read dist path from nested config, fall back to dist/{category} - let dist_rel = ssp - .and_then(|v| v.get(dist_key)) - .and_then(|v| v.get("dist")) - .and_then(|v| v.as_str()) - .unwrap_or(&format!("dist/{category}")) - .to_string(); - - // This is the OUTPUT (dist) directory — translated files live here - let dist_dir = base.join(&dist_rel); - - // Read src path from nested config, fall back to src/{category} - let src_rel = ssp - .and_then(|v| v.get(src_key)) - .and_then(|v| v.get("src")) - .and_then(|v| v.as_str()) - .unwrap_or(&format!("src/{category}")) - .to_string(); - - // Source files live under src/{category}/ relative to aindex root - let src_dir = base.join(&src_rel); +pub fn list_category_files(cwd: String, category: String) -> Result, String> { + let ResolvedConfig { aindex_root: base, config } = load_resolved_config(&cwd)?; + let paths = resolve_category_paths(&config, &category)?; + let dist_dir = base.join(&paths.translated_rel); + let src_dir = base.join(&paths.source_rel); if !src_dir.exists() { return Ok(vec![]); @@ -403,7 +433,14 @@ pub fn list_category_files(_cwd: String, category: String) -> Result Result, ) -> std::io::Result<()> { @@ -424,28 +461,30 @@ fn collect_category_cn_mdx( let entry = entry?; let path = entry.path(); if path.is_dir() { - collect_category_cn_mdx(&path, src_root, category, base, dist_dir, out)?; + 
collect_category_source_mdx(&path, src_root, base, translated_root_rel, dist_dir, out)?; } else if let Some(name) = path.file_name().and_then(|n| n.to_str()) { let rel = path.strip_prefix(base).unwrap_or(&path); let source_path = rel.to_string_lossy().replace('\\', "/"); - if name.ends_with(".cn.mdx") { + if has_source_mdx_extension(name) { // Source + translated pair let rel_from_src = path.strip_prefix(src_root).unwrap_or(&path); let rel_str = rel_from_src.to_string_lossy().replace('\\', "/") - .replace(".cn.mdx", ".mdx"); + .to_string(); + let rel_str = replace_source_mdx_extension(&rel_str) + .unwrap_or(rel_str); let translated_abs = dist_dir.join(&rel_str); let translated_path = translated_abs.strip_prefix(base) .map(|p| p.to_string_lossy().replace('\\', "/")) - .unwrap_or_else(|_| format!("dist/{}/{}", category, rel_str)); + .unwrap_or_else(|_| format!("{}/{}", translated_root_rel.trim_end_matches('/'), rel_str)); out.push(AindexFileEntry { source_path, translated_path, translated_exists: translated_abs.exists(), - file_type: "cnMdx".to_string(), + file_type: SOURCE_MDX_FILE_TYPE.to_string(), }); - } else { + } else if !name.ends_with(".mdx") { // Resource file — single preview only out.push(AindexFileEntry { source_path, @@ -471,7 +510,7 @@ pub struct CategoryStats { pub file_count: u32, pub total_chars: u64, pub total_lines: u64, - pub cn_mdx_count: u32, + pub source_mdx_count: u32, pub resource_count: u32, pub translated_count: u32, } @@ -493,7 +532,7 @@ pub struct AindexStats { pub total_files: u32, pub total_chars: u64, pub total_lines: u64, - pub total_cn_mdx: u32, + pub total_source_mdx: u32, pub total_resources: u32, pub total_translated: u32, pub categories: Vec, @@ -514,7 +553,7 @@ fn stat_dir(dir: &std::path::Path) -> (u32, u64, u64, u32, u32, u32, std::collec let mut file_count = 0u32; let mut total_chars = 0u64; let mut total_lines = 0u64; - let mut cn_mdx = 0u32; + let mut source_mdx = 0u32; let mut resource = 0u32; let mut translated = 0u32; 
let mut ext_map: std::collections::HashMap = std::collections::HashMap::new(); @@ -527,7 +566,7 @@ fn stat_dir(dir: &std::path::Path) -> (u32, u64, u64, u32, u32, u32, std::collec file_count += fc; total_chars += tc; total_lines += tl; - cn_mdx += cm; + source_mdx += cm; resource += rc; translated += tr; for (k, v) in em { @@ -540,9 +579,9 @@ fn stat_dir(dir: &std::path::Path) -> (u32, u64, u64, u32, u32, u32, std::collec total_lines += content.lines().count() as u64; } let name = path.file_name().and_then(|n| n.to_str()).unwrap_or(""); - if name.ends_with(".cn.mdx") { - cn_mdx += 1; - *ext_map.entry("cn.mdx".to_string()).or_default() += 1; + if has_source_mdx_extension(name) { + source_mdx += 1; + *ext_map.entry("src.mdx".to_string()).or_default() += 1; } else { // Extract extension let ext = name.rsplit('.').next().unwrap_or("other").to_lowercase(); @@ -551,13 +590,13 @@ fn stat_dir(dir: &std::path::Path) -> (u32, u64, u64, u32, u32, u32, std::collec } } } - (file_count, total_chars, total_lines, cn_mdx, resource, translated, ext_map) + (file_count, total_chars, total_lines, source_mdx, resource, translated, ext_map) } /// Gather comprehensive statistics about the aindex project. 
#[tauri::command] -pub fn get_aindex_stats(_cwd: String) -> Result { - let base = resolve_aindex_root()?; +pub fn get_aindex_stats(cwd: String) -> Result { + let ResolvedConfig { aindex_root: base, config } = load_resolved_config(&cwd)?; let mut stats = AindexStats::default(); let mut all_ext: std::collections::HashMap = std::collections::HashMap::new(); @@ -579,7 +618,7 @@ pub fn get_aindex_stats(_cwd: String) -> Result { stats.total_files += fc; stats.total_chars += tc; stats.total_lines += tl; - stats.total_cn_mdx += cm; + stats.total_source_mdx += cm; stats.total_resources += rc; for (k, v) in em { *all_ext.entry(k).or_default() += v; @@ -589,9 +628,10 @@ pub fn get_aindex_stats(_cwd: String) -> Result { } } - // Scan src/skills, src/commands, src/agents + // Scan configured source directories for skills, commands, agents for cat_name in &["skills", "commands", "agents"] { - let src_dir = base.join("src").join(cat_name); + let category_paths = resolve_category_paths(&config, cat_name)?; + let src_dir = base.join(&category_paths.source_rel); if !src_dir.exists() { stats.categories.push(CategoryStats { name: cat_name.to_string(), @@ -605,14 +645,14 @@ pub fn get_aindex_stats(_cwd: String) -> Result { file_count: fc, total_chars: tc, total_lines: tl, - cn_mdx_count: cm, + source_mdx_count: cm, resource_count: rc, translated_count: 0, }); stats.total_files += fc; stats.total_chars += tc; stats.total_lines += tl; - stats.total_cn_mdx += cm; + stats.total_source_mdx += cm; stats.total_resources += rc; for (k, v) in em { *all_ext.entry(k).or_default() += v; diff --git a/gui/src-tauri/tauri.conf.json b/gui/src-tauri/tauri.conf.json index 6b818f44..40165d38 100644 --- a/gui/src-tauri/tauri.conf.json +++ b/gui/src-tauri/tauri.conf.json @@ -1,6 +1,6 @@ { "$schema": "https://schema.tauri.app/config/2", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "productName": "Memory Sync", "identifier": "org.truenine.memory-sync", "build": { @@ -23,6 +23,7 @@ }, 
"bundle": { "active": true, + "createUpdaterArtifacts": true, "targets": [ "nsis", "deb", @@ -49,7 +50,7 @@ "endpoints": [ "https://releases.truenine.org/memory-sync/{{target}}/{{arch}}/{{current_version}}" ], - "pubkey": "" + "pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IDNFQzM2Qjc0MEVCNUY2ODAKUldTQTlyVU9kR3ZEUGpRUDQ3OUF4VE1XcjZGYjJwNjhwYmYwQVZmTmgzQjRKMjVrUExmOVdsd2YK" } } } diff --git a/gui/src/api/bridge.ts b/gui/src/api/bridge.ts index 27f3ba0f..62afc6f5 100644 --- a/gui/src/api/bridge.ts +++ b/gui/src/api/bridge.ts @@ -61,7 +61,7 @@ export interface AindexFileEntry { readonly sourcePath: string readonly translatedPath: string readonly translatedExists: boolean - readonly fileType: 'cnMdx' | 'resource' + readonly fileType: 'sourceMdx' | 'resource' } export function listAindexFiles(cwd: string): Promise { @@ -91,7 +91,7 @@ export interface CategoryStats { readonly fileCount: number readonly totalChars: number readonly totalLines: number - readonly cnMdxCount: number + readonly sourceMdxCount: number readonly resourceCount: number readonly translatedCount: number } @@ -107,7 +107,7 @@ export interface AindexStats { readonly totalFiles: number readonly totalChars: number readonly totalLines: number - readonly totalCnMdx: number + readonly totalSourceMdx: number readonly totalResources: number readonly totalTranslated: number readonly categories: readonly CategoryStats[] diff --git a/gui/src/i18n/en-US.json b/gui/src/i18n/en-US.json index 418923e7..89147bcb 100644 --- a/gui/src/i18n/en-US.json +++ b/gui/src/i18n/en-US.json @@ -32,8 +32,8 @@ "config.openDir": "Open Config Dir", "config.field.workspaceDir": "Workspace Dir", "config.field.workspaceDir.desc": "Root workspace directory path", - "config.field.aindex.name": "Aindex Name", - "config.field.aindex.name.desc": "Folder name of the aindex (inside workspace dir)", + "config.field.aindex.dir": "Aindex Dir", + "config.field.aindex.dir.desc": "Directory name of the aindex inside the workspace", 
"config.field.logLevel": "Log Level", "config.field.logLevel.desc": "CLI log output level", "plugins.title": "Plugins", @@ -84,7 +84,7 @@ "dashboard.stats.totalFiles": "Total Files", "dashboard.stats.totalChars": "Total Characters", "dashboard.stats.totalLines": "Total Lines", - "dashboard.stats.cnMdx": "Source Files (.cn.mdx)", + "dashboard.stats.sourceMdx": "Source Files (.src.mdx)", "dashboard.stats.translated": "Translated Files", "dashboard.stats.projects": "Project Distribution", "dashboard.stats.categories": "Category Overview", diff --git a/gui/src/i18n/zh-CN.json b/gui/src/i18n/zh-CN.json index 7a71802c..3469090a 100644 --- a/gui/src/i18n/zh-CN.json +++ b/gui/src/i18n/zh-CN.json @@ -32,8 +32,8 @@ "config.openDir": "打开配置目录", "config.field.workspaceDir": "工作区目录", "config.field.workspaceDir.desc": "工作区根目录路径", - "config.field.aindex.name": "Aindex 名称", - "config.field.aindex.name.desc": "Aindex 的文件夹名称(位于工作区目录下)", + "config.field.aindex.dir": "Aindex 目录名", + "config.field.aindex.dir.desc": "工作区内 aindex 目录的名称", "config.field.logLevel": "日志级别", "config.field.logLevel.desc": "CLI 日志输出级别", "plugins.title": "插件列表", @@ -84,7 +84,7 @@ "dashboard.stats.totalFiles": "总文件数", "dashboard.stats.totalChars": "总字符数", "dashboard.stats.totalLines": "总行数", - "dashboard.stats.cnMdx": "源文件 (.cn.mdx)", + "dashboard.stats.sourceMdx": "源文件 (.src.mdx)", "dashboard.stats.translated": "已翻译文件", "dashboard.stats.projects": "项目分布", "dashboard.stats.categories": "分类概览", diff --git a/gui/src/pages/ConfigPage.tsx b/gui/src/pages/ConfigPage.tsx index 176a05b7..3999d850 100644 --- a/gui/src/pages/ConfigPage.tsx +++ b/gui/src/pages/ConfigPage.tsx @@ -109,10 +109,10 @@ const ConfigForm: FC = ({ data, onChange, t }) => { ))} updateNestedField('aindex', 'name', v)} + label={t('config.field.aindex.dir')} + description={t('config.field.aindex.dir.desc')} + value={(aindex['dir'] as string) ?? 
''} + onChange={(v) => updateNestedField('aindex', 'dir', v)} placeholder="aindex" /> diff --git a/gui/src/pages/DashboardPage.tsx b/gui/src/pages/DashboardPage.tsx index c84b49c3..422d4d5a 100644 --- a/gui/src/pages/DashboardPage.tsx +++ b/gui/src/pages/DashboardPage.tsx @@ -200,7 +200,7 @@ const DashboardPage: FC = () => {
- +
diff --git a/gui/src/pages/FilesPage.tsx b/gui/src/pages/FilesPage.tsx index a22b22d6..7c5967de 100644 --- a/gui/src/pages/FilesPage.tsx +++ b/gui/src/pages/FilesPage.tsx @@ -27,7 +27,7 @@ loader.config({ monaco }) registerVitesseThemes() // Register mdx as a language aliased to markdown for syntax highlighting -monaco.languages.register({ id: 'mdx', extensions: ['.mdx', '.cn.mdx'], aliases: ['MDX'] }) +monaco.languages.register({ id: 'mdx', extensions: ['.mdx', '.src.mdx'], aliases: ['MDX'] }) // Use markdown tokenizer for mdx const mdLangDef = (monaco.languages as unknown as Record)['_languages'] if (!mdLangDef) { @@ -279,7 +279,15 @@ const CATEGORY_TABS: readonly { readonly value: FileCategory; readonly labelKey: /** Root prefix for tree building per category */ function categoryRootPrefix(cat: FileCategory): string { - return cat === 'projects' ? 'app' : `src/${cat}` + if (cat === 'projects') { + return 'app' + } + + if (cat === 'agents') { + return 'subagents' + } + + return cat } // --------------------------------------------------------------------------- @@ -351,7 +359,11 @@ const FilesPage: FC = () => { useEffect(() => { fetchFiles() }, [fetchFiles]) - const tree = useMemo(() => buildTree(files, categoryRootPrefix(category)), [files, category]) + const treeRootPrefix = useMemo( + () => files[0]?.sourcePath.split('/')[0] ?? 
categoryRootPrefix(category), + [files, category] + ) + const tree = useMemo(() => buildTree(files, treeRootPrefix), [files, treeRootPrefix]) const handleSelect = useCallback(async (entry: AindexFileEntry) => { setSelected(entry) @@ -488,7 +500,7 @@ const FilesPage: FC = () => { readOnly /> ) : ( - /* Dual pane for .cn.mdx source + translated */ + /* Dual pane for .src.mdx source + translated */ <> { /** * **Validates: Requirements 3.4** * - * For any object field (profile, tool, fastCommandSeriesOptions) + * For any object field (profile, commandSeriesOptions, outputScopes) * set to a non-object value, validateConfig should return at least * one error for that field. */ diff --git a/gui/src/utils/configValidation.test.ts b/gui/src/utils/configValidation.test.ts index a56ae15e..b7626155 100644 --- a/gui/src/utils/configValidation.test.ts +++ b/gui/src/utils/configValidation.test.ts @@ -93,8 +93,8 @@ describe('validateConfig — aindex', () => { commands: { src: 'commands', dist: 'dist/commands' }, subAgents: { src: 'subagents', dist: 'dist/subagents' }, rules: { src: 'rules', dist: 'dist/rules' }, - globalPrompt: { src: 'global.cn.mdx', dist: 'dist/global.mdx' }, - workspacePrompt: { src: 'workspace.cn.mdx', dist: 'dist/workspace.mdx' }, + globalPrompt: { src: 'global.src.mdx', dist: 'dist/global.mdx' }, + workspacePrompt: { src: 'workspace.src.mdx', dist: 'dist/workspace.mdx' }, app: { src: 'app', dist: 'dist/app' }, ext: { src: 'ext', dist: 'dist/ext' }, arch: { src: 'arch', dist: 'dist/arch' }, @@ -161,56 +161,47 @@ describe('validateConfig — profile', () => { }) }) -// ─── tool ────────────────────────────────────────────────────────────── -describe('validateConfig — tool', () => { - it('accepts object with string values', () => { - expect(validateConfig({ tool: { a: 'x', b: 'y' } })).toHaveLength(0) - }) - - it('accepts object with undefined values', () => { - expect(validateConfig({ tool: { a: undefined } })).toHaveLength(0) +// ─── commandSeriesOptions 
─────────────────────────────────────────────── +describe('validateConfig — commandSeriesOptions', () => { + it('accepts a plain object', () => { + expect(validateConfig({ commandSeriesOptions: { includeSeriesPrefix: true } })).toHaveLength(0) }) it('rejects non-object', () => { - const errors = validateConfig({ tool: 'bad' }) - expect(errorFields(errors)).toContain('tool') + const errors = validateConfig({ commandSeriesOptions: 42 }) + expect(errorFields(errors)).toContain('commandSeriesOptions') }) it('rejects array', () => { - const errors = validateConfig({ tool: [] }) - expect(errorFields(errors)).toContain('tool') + const errors = validateConfig({ commandSeriesOptions: [] }) + expect(errorFields(errors)).toContain('commandSeriesOptions') }) it('rejects null', () => { - const errors = validateConfig({ tool: null }) - expect(errorFields(errors)).toContain('tool') - }) - - it('rejects non-string values inside tool', () => { - const errors = validateConfig({ tool: { a: 123 } }) - expect(errorFields(errors)).toContain('tool.a') + const errors = validateConfig({ commandSeriesOptions: null }) + expect(errorFields(errors)).toContain('commandSeriesOptions') }) }) -// ─── fastCommandSeriesOptions ────────────────────────────────────────── -describe('validateConfig — fastCommandSeriesOptions', () => { +// ─── outputScopes ─────────────────────────────────────────────────────── +describe('validateConfig — outputScopes', () => { it('accepts a plain object', () => { - expect(validateConfig({ fastCommandSeriesOptions: { includeSeriesPrefix: true } })).toHaveLength(0) + expect(validateConfig({ outputScopes: { plugins: {} } })).toHaveLength(0) }) it('rejects non-object', () => { - const errors = validateConfig({ fastCommandSeriesOptions: 42 }) - expect(errorFields(errors)).toContain('fastCommandSeriesOptions') + const errors = validateConfig({ outputScopes: 42 }) + expect(errorFields(errors)).toContain('outputScopes') }) it('rejects array', () => { - const errors = 
validateConfig({ fastCommandSeriesOptions: [] }) - expect(errorFields(errors)).toContain('fastCommandSeriesOptions') + const errors = validateConfig({ outputScopes: [] }) + expect(errorFields(errors)).toContain('outputScopes') }) it('rejects null', () => { - const errors = validateConfig({ fastCommandSeriesOptions: null }) - expect(errorFields(errors)).toContain('fastCommandSeriesOptions') + const errors = validateConfig({ outputScopes: null }) + expect(errorFields(errors)).toContain('outputScopes') }) }) @@ -241,6 +232,16 @@ describe('validateConfig — unknown fields', () => { const errors = validateConfig({ excludePatterns: {} }) expect(warningFields(errors)).toContain('excludePatterns') }) + + it('warns on removed shadowSourceProject field', () => { + const errors = validateConfig({ shadowSourceProject: { name: 'legacy-shadow' } }) + expect(warningFields(errors)).toContain('shadowSourceProject') + }) + + it('warns on removed fastCommandSeriesOptions field', () => { + const errors = validateConfig({ fastCommandSeriesOptions: { includeSeriesPrefix: true } }) + expect(warningFields(errors)).toContain('fastCommandSeriesOptions') + }) }) // ─── combined / realistic configs ────────────────────────────────────── @@ -254,15 +255,16 @@ describe('validateConfig — realistic configs', () => { commands: { src: 'commands', dist: 'dist/commands' }, subAgents: { src: 'subagents', dist: 'dist/subagents' }, rules: { src: 'rules', dist: 'dist/rules' }, - globalPrompt: { src: 'global.cn.mdx', dist: 'dist/global.mdx' }, - workspacePrompt: { src: 'workspace.cn.mdx', dist: 'dist/workspace.mdx' }, + globalPrompt: { src: 'global.src.mdx', dist: 'dist/global.mdx' }, + workspacePrompt: { src: 'workspace.src.mdx', dist: 'dist/workspace.mdx' }, app: { src: 'app', dist: 'dist/app' }, ext: { src: 'ext', dist: 'dist/ext' }, arch: { src: 'arch', dist: 'dist/arch' }, }, logLevel: 'debug', profile: { name: 'test' }, - tool: { editor: 'vscode' }, + commandSeriesOptions: { includeSeriesPrefix: true 
}, + outputScopes: { plugins: {} }, } expect(validateConfig(config)).toHaveLength(0) }) diff --git a/gui/src/utils/configValidation.ts b/gui/src/utils/configValidation.ts index e53df4f9..e9957464 100644 --- a/gui/src/utils/configValidation.ts +++ b/gui/src/utils/configValidation.ts @@ -10,7 +10,7 @@ export type ValidationSeverity = 'error' | 'warning' export interface ValidationError { - /** Dot-separated path to the offending field, e.g. "shadowSourceProject.skill.src" */ + /** Dot-separated path to the offending field, e.g. "aindex.skills.src" */ readonly field: string /** Human-readable description of the problem */ readonly message: string @@ -26,11 +26,10 @@ const KNOWN_FIELDS: ReadonlySet = new Set([ 'version', 'workspaceDir', 'aindex', - 'shadowSourceProject', 'logLevel', - 'fastCommandSeriesOptions', + 'commandSeriesOptions', + 'outputScopes', 'profile', - 'tool', ]) const VALID_LOG_LEVELS: ReadonlySet = new Set([ @@ -155,32 +154,25 @@ export function validateConfig(raw: unknown): readonly ValidationError[] { } } - // ── tool ───────────────────────────────────────────────────────────── - if ('tool' in obj) { - const v = obj['tool'] + // ── commandSeriesOptions ───────────────────────────────────────────── + if ('commandSeriesOptions' in obj) { + const v = obj['commandSeriesOptions'] if (typeof v !== 'object' || v === null || Array.isArray(v)) { - errors.push({ field: 'tool', message: 'tool must be an object', severity: 'error' }) - } else { - const toolObj = v as Record - for (const [key, value] of Object.entries(toolObj)) { - if (typeof value !== 'string' && value !== undefined) { - errors.push({ - field: `tool.${key}`, - message: `tool.${key} must be a string`, - severity: 'error', - }) - } - } + errors.push({ + field: 'commandSeriesOptions', + message: 'commandSeriesOptions must be an object', + severity: 'error', + }) } } - // ── fastCommandSeriesOptions ───────────────────────────────────────── - if ('fastCommandSeriesOptions' in obj) { - const v = 
obj['fastCommandSeriesOptions'] + // ── outputScopes ───────────────────────────────────────────────────── + if ('outputScopes' in obj) { + const v = obj['outputScopes'] if (typeof v !== 'object' || v === null || Array.isArray(v)) { errors.push({ - field: 'fastCommandSeriesOptions', - message: 'fastCommandSeriesOptions must be an object', + field: 'outputScopes', + message: 'outputScopes must be an object', severity: 'error', }) } diff --git a/libraries/logger/Cargo.toml b/libraries/logger/Cargo.toml index c9500744..f97cc63f 100644 --- a/libraries/logger/Cargo.toml +++ b/libraries/logger/Cargo.toml @@ -15,6 +15,7 @@ default = [] napi = ["dep:napi", "dep:napi-derive"] [dependencies] +chrono = { version = "0.4", default-features = false, features = ["clock", "std"] } serde = { workspace = true } serde_json = { workspace = true } napi = { workspace = true, optional = true } diff --git a/libraries/logger/package.json b/libraries/logger/package.json index 3491d933..990f60d6 100644 --- a/libraries/logger/package.json +++ b/libraries/logger/package.json @@ -1,9 +1,9 @@ { "name": "@truenine/logger", "type": "module", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "private": true, - "description": "Rust-powered structured logger for Node.js with pure-TS fallback", + "description": "Rust-powered structured logger for Node.js via N-API", "license": "AGPL-3.0-only", "exports": { "./package.json": "./package.json", @@ -28,11 +28,11 @@ ] }, "scripts": { - "build": "tsdown", - "build:all": "run-s build:native build", + "build": "run-s build:ts build:native", + "build:all": "run-s build:ts build:native", "build:native": "napi build --platform --release --output-dir dist -- --features napi", "build:native:debug": "napi build --platform --output-dir dist -- --features napi", - "build:ts": "tsdown", + "build:ts": "tsx ../../scripts/build-quiet.ts", "check": "run-p typecheck lint", "lint": "eslint --cache .", "lintfix": "eslint --fix --cache .", diff --git 
a/libraries/logger/src/index.ts b/libraries/logger/src/index.ts index 7844e11d..62f22cef 100644 --- a/libraries/logger/src/index.ts +++ b/libraries/logger/src/index.ts @@ -2,254 +2,213 @@ import {createRequire} from 'node:module' import process from 'node:process' export type LogLevel = 'error' | 'warn' | 'info' | 'debug' | 'trace' | 'fatal' | 'silent' +type LoggerMethod = (message: string | object, ...meta: unknown[]) => void export interface ILogger { - error: (message: string | object, ...meta: unknown[]) => void - warn: (message: string | object, ...meta: unknown[]) => void - info: (message: string | object, ...meta: unknown[]) => void - debug: (message: string | object, ...meta: unknown[]) => void - trace: (message: string | object, ...meta: unknown[]) => void - fatal: (message: string | object, ...meta: unknown[]) => void -} // Napi binding types (loaded at runtime) + error: LoggerMethod + warn: LoggerMethod + info: LoggerMethod + debug: LoggerMethod + trace: LoggerMethod + fatal: LoggerMethod +} + +type ActiveLogLevel = Exclude +interface PlatformBinding {readonly local: string, readonly suffix: string} interface NapiLoggerInstance { - error: (message: string) => void - errorWithMeta: (message: string, meta: string) => void - warn: (message: string) => void - warnWithMeta: (message: string, meta: string) => void - info: (message: string) => void - infoWithMeta: (message: string, meta: string) => void - debug: (message: string) => void - debugWithMeta: (message: string, meta: string) => void - trace: (message: string) => void - traceWithMeta: (message: string, meta: string) => void - fatal: (message: string) => void - fatalWithMeta: (message: string, meta: string) => void + log: (level: ActiveLogLevel, message: string, meta?: string) => void } interface NapiLoggerModule { createLogger: (namespace: string, level?: string) => NapiLoggerInstance setGlobalLogLevel: (level: string) => void getGlobalLogLevel: () => string | undefined -} // Load napi binding (CJS) 
with fallback to pure-TS implementation - -let napiBinding: NapiLoggerModule | null = null - -try { - const require = createRequire(import.meta.url) - const {platform, arch} = process - const platforms: Record = { - 'win32-x64': ['napi-logger.win32-x64-msvc', 'win32-x64-msvc'], - 'linux-x64': ['napi-logger.linux-x64-gnu', 'linux-x64-gnu'], - 'linux-arm64': ['napi-logger.linux-arm64-gnu', 'linux-arm64-gnu'], - 'darwin-arm64': ['napi-logger.darwin-arm64', 'darwin-arm64'], - 'darwin-x64': ['napi-logger.darwin-x64', 'darwin-x64'] +} + +const PLATFORM_BINDINGS: Record = { + 'win32-x64': {local: 'napi-logger.win32-x64-msvc', suffix: 'win32-x64-msvc'}, + 'linux-x64': {local: 'napi-logger.linux-x64-gnu', suffix: 'linux-x64-gnu'}, + 'linux-arm64': {local: 'napi-logger.linux-arm64-gnu', suffix: 'linux-arm64-gnu'}, + 'darwin-arm64': {local: 'napi-logger.darwin-arm64', suffix: 'darwin-arm64'}, + 'darwin-x64': {local: 'napi-logger.darwin-x64', suffix: 'darwin-x64'} +} + +const ACTIVE_LOG_LEVELS: readonly ActiveLogLevel[] = ['error', 'warn', 'info', 'debug', 'trace', 'fatal'] + +let napiBinding: NapiLoggerModule | undefined, + napiBindingError: Error | undefined + +function isNapiLoggerModule(value: unknown): value is NapiLoggerModule { + if (value == null || typeof value !== 'object') return false + + const candidate = value as Partial + return typeof candidate.createLogger === 'function' + && typeof candidate.setGlobalLogLevel === 'function' + && typeof candidate.getGlobalLogLevel === 'function' +} + +function getPlatformBinding(): PlatformBinding { + const binding = PLATFORM_BINDINGS[`${process.platform}-${process.arch}`] + if (binding != null) return binding + + throw new Error( + `Unsupported platform for @truenine/logger native binding: ${process.platform}-${process.arch}` + ) +} + +function formatBindingLoadError(localError: unknown, packageError: unknown, suffix: string): Error { + const localMessage = localError instanceof Error ? 
localError.message : String(localError) + const packageMessage = packageError instanceof Error ? packageError.message : String(packageError) + return new Error( + [ + 'Failed to load @truenine/logger native binding.', + `Tried local binary "./${PLATFORM_BINDINGS[`${process.platform}-${process.arch}`]?.local ?? 'unknown'}.node" and package "@truenine/memory-sync-cli-${suffix}".`, + `Local error: ${localMessage}`, + `Package error: ${packageMessage}`, + 'Run `pnpm -F @truenine/logger run build` to build the native module.' + ].join('\n') + ) +} + +function loadNativeBinding(): NapiLoggerModule { + const moduleUrl = import.meta.url + const runtimeRequire = createRequire(moduleUrl) + const {local, suffix} = getPlatformBinding() + + try { + return runtimeRequire(`./${local}.node`) as NapiLoggerModule } - const entry = platforms[`${platform}-${arch}`] - if (entry != null) { - const [local, suffix] = entry + catch (localError) { try { - napiBinding = require(`./${local}.node`) as NapiLoggerModule + const cliBinaryPackage = runtimeRequire(`@truenine/memory-sync-cli-${suffix}`) as Record + const loggerModule = cliBinaryPackage['logger'] + + if (isNapiLoggerModule(loggerModule)) return loggerModule + + throw new Error(`Package "@truenine/memory-sync-cli-${suffix}" does not export a logger binding`) } - catch { - try { - const pkg = require(`@truenine/memory-sync-cli-${suffix}`) as Record - napiBinding = pkg['logger'] as NapiLoggerModule - } - catch {} + catch (packageError) { + throw formatBindingLoadError(localError, packageError, suffix) } } } -catch {} // Native module not available — fall back to pure-TS implementation - -const colors = { - reset: '\x1B[0m', - red: '\x1B[31m', - yellow: '\x1B[33m', - cyan: '\x1B[36m', - magenta: '\x1B[35m', - gray: '\x1B[90m', - blue: '\x1B[34m', - green: '\x1B[32m', - white: '\x1B[37m', - dim: '\x1B[2m', - bgRed: '\x1B[41m' -} as const - -const colorize = { - red: (text: string) => `${colors.red}${text}${colors.reset}`, - yellow: (text: 
string) => `${colors.yellow}${text}${colors.reset}`, - cyan: (text: string) => `${colors.cyan}${text}${colors.reset}`, - magenta: (text: string) => `${colors.magenta}${text}${colors.reset}`, - gray: (text: string) => `${colors.gray}${text}${colors.reset}`, - blue: (text: string) => `${colors.blue}${text}${colors.reset}`, - green: (text: string) => `${colors.green}${text}${colors.reset}`, - white: (text: string) => `${colors.white}${text}${colors.reset}`, - dim: (text: string) => `${colors.dim}${text}${colors.reset}`, - bgRed: (text: string) => `${colors.bgRed}${text}${colors.reset}` -} - -let globalLogLevel: LogLevel | undefined - -const LEVEL_COLORS: Record string> = { - error: colorize.red, - warn: colorize.yellow, - info: colorize.cyan, - debug: colorize.magenta, - trace: colorize.gray, - fatal: colorize.bgRed -} - -const LEVEL_PRIORITY: Record = { - silent: 0, - fatal: 1, - error: 2, - warn: 3, - info: 4, - debug: 5, - trace: 6 -} - -function colorizeValue(value: unknown): string { - if (value === null) return colorize.dim('null') - if (typeof value === 'undefined') return colorize.dim('undefined') - if (typeof value === 'boolean') return colorize.yellow(String(value)) - if (typeof value === 'number') return colorize.blue(String(value)) - if (typeof value === 'string') return colorize.green(`"${value}"`) - if (Array.isArray(value)) { - if (value.length === 0) return '[]' - return `[${value.map(v => colorizeValue(v)).join(',')}]` + +function getNapiBinding(): NapiLoggerModule { + if (napiBinding != null) return napiBinding + + if (napiBindingError != null) throw napiBindingError + + try { + napiBinding = loadNativeBinding() + return napiBinding } - if (value instanceof Error) { - const errorObj: Record = { - name: value.name, - message: value.message, - stack: value.stack - } - for (const key of Object.getOwnPropertyNames(value)) { - if (key !== 'name' && key !== 'message' && key !== 'stack') errorObj[key] = (value as unknown as Record)[key] - } - return 
tsToJson(errorObj) + catch (error) { + napiBindingError = error instanceof Error ? error : new Error(String(error)) + throw napiBindingError + } +} + +function serializeError(error: Error): Record { + const serializedError: Record = { + name: error.name, + message: error.message, + stack: error.stack } - if (typeof value === 'object') return tsToJson(value as Record) - return String(value) -} - -function tsToJson(obj: Record): string { - const entries = Object.entries(obj) - if (entries.length === 0) return '{}' - const parts = entries.map(([k, v]) => { - const key = colorize.magenta(`"${k}"`) - return `${key}:${colorizeValue(v)}` - }) - return `{${parts.join(',')}}` -} - -function getTimestamp(): string { - const now = new Date() - const hours = String(now.getHours()).padStart(2, '0') - const minutes = String(now.getMinutes()).padStart(2, '0') - const seconds = String(now.getSeconds()).padStart(2, '0') - const ms = String(now.getMilliseconds()).padStart(3, '0') - return `${hours}:${minutes}:${seconds}.${ms}` -} - -function formatLog(level: LogLevel, namespace: string, message: unknown, meta?: Record): void { - const timestamp = getTimestamp() - const colorFn = LEVEL_COLORS[level] ?? colorize.white - const messageStr = String(message) - const hasMeta = meta != null && Object.keys(meta).length > 0 - const isEmptyMessage = messageStr === '' - const base = {$: [timestamp, colorFn(level.toUpperCase()), namespace]} - const _ = hasMeta ? isEmptyMessage ? 
meta : {[messageStr]: meta} : message - const output = tsToJson({...base, _} as unknown as Record) - if (level === 'error' || level === 'fatal') console.error(output) - else if (level === 'warn') console.warn(output) - // eslint-disable-next-line no-console - else if (level === 'debug' || level === 'trace') console.debug(output) - // eslint-disable-next-line no-console - else console.log(output) -} - -function createTsLevelMethod(level: LogLevel, namespace: string, currentLevel: LogLevel) { - const levelPriority = LEVEL_PRIORITY[level] - const currentPriority = LEVEL_PRIORITY[currentLevel] - return (messageOrObject: string | object, ...meta: unknown[]): void => { - if (levelPriority > currentPriority) return - if (typeof messageOrObject === 'string') { - const metaObj = meta.length === 1 && typeof meta[0] === 'object' && meta[0] !== null - ? meta[0] as Record - : meta.length > 0 ? {args: meta} : void 0 - formatLog(level, namespace, messageOrObject, metaObj) - } else if (typeof messageOrObject === 'object' && messageOrObject !== null) formatLog(level, namespace, '', messageOrObject as Record) - else formatLog(level, namespace, messageOrObject) + + for (const key of Object.getOwnPropertyNames(error)) { + if (key === 'name' || key === 'message' || key === 'stack') continue + + serializedError[key] = (error as unknown as Record)[key] } + + return serializedError } -function createTsFallbackLogger(namespace: string, logLevel?: LogLevel): ILogger { - const level = logLevel ?? globalLogLevel ?? (process.env['LOG_LEVEL'] as LogLevel) ?? 
'info' - return { - error: createTsLevelMethod('error', namespace, level), - warn: createTsLevelMethod('warn', namespace, level), - info: createTsLevelMethod('info', namespace, level), - debug: createTsLevelMethod('debug', namespace, level), - trace: createTsLevelMethod('trace', namespace, level), - fatal: createTsLevelMethod('fatal', namespace, level) +function createJsonReplacer(): (this: unknown, key: string, value: unknown) => unknown { + const seen = new WeakSet() + + return function jsonReplacer(_key: string, value: unknown): unknown { + if (value instanceof Error) return serializeError(value) + + if (typeof value === 'bigint') return value.toString() + + if (typeof value === 'function') return `[Function ${value.name || 'anonymous'}]` + + if (typeof value === 'symbol') return value.toString() + + if (typeof value !== 'object' || value === null) return value + + if (seen.has(value)) return '[Circular]' + + seen.add(value) + return value } -} // Napi adapter — wraps NapiLoggerInstance to implement ILogger +} -function serializeMeta(message: string | object, meta: unknown[]): {msg: string, metaStr: string | undefined} { - if (typeof message !== 'string') return {msg: '', metaStr: JSON.stringify(message)} +function serializePayload(value: unknown): string { + return JSON.stringify(value, createJsonReplacer()) ?? 'null' +} - const metaObj = meta.length === 1 && typeof meta[0] === 'object' && meta[0] !== null +function normalizeLogArguments(message: string | object, meta: unknown[]): {message: string, metaJson: string | undefined} { + if (typeof message !== 'string') { + return { + message: '', + metaJson: serializePayload(message) + } + } + + const metaValue = meta.length === 1 && typeof meta[0] === 'object' && meta[0] !== null ? meta[0] : meta.length > 0 ? {args: meta} : void 0 - return {msg: message, metaStr: metaObj != null ? JSON.stringify(metaObj) : void 0} + + return { + message, + metaJson: metaValue == null ? 
void 0 : serializePayload(metaValue) + } } -function createNapiAdapter(instance: NapiLoggerInstance): ILogger { - function makeMethod( - plain: (msg: string) => void, - withMeta: (msg: string, meta: string) => void - ) { - return (message: string | object, ...meta: unknown[]): void => { - const {msg, metaStr} = serializeMeta(message, meta) - if (metaStr != null) withMeta(msg, metaStr) - else plain(msg) - } +function createLogMethod(instance: NapiLoggerInstance, level: ActiveLogLevel): LoggerMethod { + return (message: string | object, ...meta: unknown[]): void => { + const {message: normalizedMessage, metaJson} = normalizeLogArguments(message, meta) + instance.log(level, normalizedMessage, metaJson) } +} + +function createNapiAdapter(instance: NapiLoggerInstance): ILogger { + const methods = ACTIVE_LOG_LEVELS.reduce((logger, level) => { + logger[level] = createLogMethod(instance, level) + return logger + }, {} as Record) + return { - error: makeMethod(m => instance.error(m), (m, s) => instance.errorWithMeta(m, s)), - warn: makeMethod(m => instance.warn(m), (m, s) => instance.warnWithMeta(m, s)), - info: makeMethod(m => instance.info(m), (m, s) => instance.infoWithMeta(m, s)), - debug: makeMethod(m => instance.debug(m), (m, s) => instance.debugWithMeta(m, s)), - trace: makeMethod(m => instance.trace(m), (m, s) => instance.traceWithMeta(m, s)), - fatal: makeMethod(m => instance.fatal(m), (m, s) => instance.fatalWithMeta(m, s)) + error: methods.error, + warn: methods.warn, + info: methods.info, + debug: methods.debug, + trace: methods.trace, + fatal: methods.fatal } -} // Public API +} /** * Set the global log level for all loggers. */ export function setGlobalLogLevel(level: LogLevel): void { - globalLogLevel = level - napiBinding?.setGlobalLogLevel(level) + getNapiBinding().setGlobalLogLevel(level) } /** * Get the current global log level. 
*/ export function getGlobalLogLevel(): LogLevel | undefined { - if (napiBinding != null) return napiBinding.getGlobalLogLevel() as LogLevel | undefined - return globalLogLevel + return getNapiBinding().getGlobalLogLevel() as LogLevel | undefined } /** - * Create a logger. Uses Rust napi-logger when available, falls back to pure-TS. + * Create a logger backed by the Rust native binding. */ export function createLogger(namespace: string, logLevel?: LogLevel): ILogger { - if (napiBinding == null) return createTsFallbackLogger(namespace, logLevel) - - const instance = napiBinding.createLogger(namespace, logLevel) + const instance = getNapiBinding().createLogger(namespace, logLevel) return createNapiAdapter(instance) } diff --git a/libraries/logger/src/lib.rs b/libraries/logger/src/lib.rs index a2b7c1e1..315b7c5b 100644 --- a/libraries/logger/src/lib.rs +++ b/libraries/logger/src/lib.rs @@ -7,6 +7,7 @@ //! This logger is designed to be consumed by both CLI (human-readable with colors) //! and GUI (parsed as JSON after stripping ANSI codes). 
+use chrono::{Local, Timelike}; use std::sync::atomic::{AtomicU8, Ordering}; use serde::Serialize; @@ -200,52 +201,15 @@ fn to_colored_json(val: &Value) -> String { // --------------------------------------------------------------------------- #[allow(dead_code)] -fn get_timestamp() -> String { - let now = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .unwrap_or_default(); - let total_ms = now.as_millis(); - let ms = (total_ms % 1000) as u32; - let total_secs = (total_ms / 1000) as u64; - let secs = (total_secs % 60) as u32; - let total_mins = total_secs / 60; - let mins = (total_mins % 60) as u32; - let hours = ((total_mins / 60) % 24) as u32; - - format!("{:02}:{:02}:{:02}.{:03}", hours, mins, secs, ms) -} - -#[cfg(windows)] -fn get_local_timestamp() -> String { - #[repr(C)] - struct SystemTime { - w_year: u16, - w_month: u16, - w_day_of_week: u16, - w_day: u16, - w_hour: u16, - w_minute: u16, - w_second: u16, - w_milliseconds: u16, - } - unsafe extern "system" { - fn GetLocalTime(lp_system_time: *mut SystemTime); - } - let mut st = SystemTime { - w_year: 0, w_month: 0, w_day_of_week: 0, w_day: 0, - w_hour: 0, w_minute: 0, w_second: 0, w_milliseconds: 0, - }; - unsafe { GetLocalTime(&mut st); } - format!("{:02}:{:02}:{:02}.{:03}", st.w_hour, st.w_minute, st.w_second, st.w_milliseconds) -} - -#[cfg(not(windows))] -fn get_local_timestamp() -> String { - get_timestamp() -} - fn timestamp() -> String { - get_local_timestamp() + let now = Local::now(); + format!( + "{:02}:{:02}:{:02}.{:03}", + now.hour(), + now.minute(), + now.second(), + now.timestamp_subsec_millis() + ) } // --------------------------------------------------------------------------- @@ -261,22 +225,7 @@ fn format_log( let ts = timestamp(); let color_fn = level.color_fn(); - let payload = match meta { - Some(meta_val) if meta_val.is_object() && !meta_val.as_object().unwrap().is_empty() => { - let msg_str = match message { - Value::String(s) => s.clone(), - _ => 
String::new(), - }; - if msg_str.is_empty() { - meta_val.clone() - } else { - let mut map = serde_json::Map::new(); - map.insert(msg_str, meta_val.clone()); - Value::Object(map) - } - } - _ => message.clone(), - }; + let payload = build_payload(message, meta); let record = LogRecord { meta: (ts.clone(), level.as_str().to_string(), namespace.to_string()), @@ -305,6 +254,36 @@ fn format_log( record } +fn build_payload(message: &Value, meta: Option<&Value>) -> Value { + let Some(meta_val) = meta else { + return message.clone(); + }; + + if meta_val.as_object().is_some_and(|object| object.is_empty()) { + return message.clone(); + } + + let message_str = match message { + Value::String(s) => s.as_str(), + _ => "", + }; + + if message_str.is_empty() { + return meta_val.clone(); + } + + if meta_val.is_object() { + let mut map = serde_json::Map::new(); + map.insert(message_str.to_string(), meta_val.clone()); + return Value::Object(map); + } + + let mut map = serde_json::Map::new(); + map.insert("message".to_string(), Value::String(message_str.to_string())); + map.insert("meta".to_string(), meta_val.clone()); + Value::Object(map) +} + // --------------------------------------------------------------------------- // Logger // --------------------------------------------------------------------------- @@ -405,14 +384,30 @@ macro_rules! 
log_debug { #[cfg(feature = "napi")] mod napi_binding { + use super::{ + LogLevel, Logger, create_logger as core_create_logger, get_global_log_level as core_get_global, + set_global_log_level as core_set_global, + }; use napi_derive::napi; use serde_json::Value; - use super::{LogLevel, Logger, create_logger as core_create_logger, set_global_log_level as core_set_global, get_global_log_level as core_get_global}; fn parse_level(s: &str) -> Option { LogLevel::from_str_loose(s) } + fn parse_meta(meta_json: Option) -> Option { + let meta = meta_json?; + match serde_json::from_str(&meta) { + Ok(value) => Some(value), + Err(_) => Some(Value::String(meta)), + } + } + + fn parse_level_or_error(level: &str) -> napi::Result { + parse_level(level) + .ok_or_else(|| napi::Error::from_reason(format!("Invalid log level: {level}"))) + } + #[napi] pub struct NapiLogger { inner: Logger, @@ -421,85 +416,35 @@ mod napi_binding { #[napi] impl NapiLogger { #[napi] - pub fn error(&self, message: String) { - self.inner.error(Value::String(message), None); - } - - #[napi] - pub fn error_with_meta(&self, message: String, meta: String) { - let meta_val: Value = serde_json::from_str(&meta).unwrap_or(Value::String(meta)); - self.inner.error(Value::String(message), Some(meta_val)); - } - - #[napi] - pub fn warn(&self, message: String) { - self.inner.warn(Value::String(message), None); - } - - #[napi] - pub fn warn_with_meta(&self, message: String, meta: String) { - let meta_val: Value = serde_json::from_str(&meta).unwrap_or(Value::String(meta)); - self.inner.warn(Value::String(message), Some(meta_val)); - } - - #[napi] - pub fn info(&self, message: String) { - self.inner.info(Value::String(message), None); - } - - #[napi] - pub fn info_with_meta(&self, message: String, meta: String) { - let meta_val: Value = serde_json::from_str(&meta).unwrap_or(Value::String(meta)); - self.inner.info(Value::String(message), Some(meta_val)); - } - - #[napi] - pub fn debug(&self, message: String) { - 
self.inner.debug(Value::String(message), None); - } - - #[napi] - pub fn debug_with_meta(&self, message: String, meta: String) { - let meta_val: Value = serde_json::from_str(&meta).unwrap_or(Value::String(meta)); - self.inner.debug(Value::String(message), Some(meta_val)); - } - - #[napi] - pub fn trace(&self, message: String) { - self.inner.trace(Value::String(message), None); - } - - #[napi] - pub fn trace_with_meta(&self, message: String, meta: String) { - let meta_val: Value = serde_json::from_str(&meta).unwrap_or(Value::String(meta)); - self.inner.trace(Value::String(message), Some(meta_val)); - } - - #[napi] - pub fn fatal(&self, message: String) { - self.inner.fatal(Value::String(message), None); - } - - #[napi] - pub fn fatal_with_meta(&self, message: String, meta: String) { - let meta_val: Value = serde_json::from_str(&meta).unwrap_or(Value::String(meta)); - self.inner.fatal(Value::String(message), Some(meta_val)); + pub fn log( + &self, + level: String, + message: String, + meta_json: Option, + ) -> napi::Result<()> { + let level = parse_level_or_error(&level)?; + let meta = parse_meta(meta_json); + self.inner.log(level, Value::String(message), meta); + Ok(()) } } #[napi] - pub fn create_logger(namespace: String, level: Option) -> NapiLogger { - let log_level = level.as_deref().and_then(parse_level); - NapiLogger { + pub fn create_logger(namespace: String, level: Option) -> napi::Result { + let log_level = match level { + Some(level) => Some(parse_level_or_error(&level)?), + None => None, + }; + + Ok(NapiLogger { inner: core_create_logger(&namespace, log_level), - } + }) } #[napi] - pub fn set_global_log_level(level: String) { - if let Some(l) = parse_level(&level) { - core_set_global(l); - } + pub fn set_global_log_level(level: String) -> napi::Result<()> { + core_set_global(parse_level_or_error(&level)?); + Ok(()) } #[napi] @@ -551,6 +496,24 @@ mod tests { assert!(logger.log(LogLevel::Error, Value::String("err".into()), None).is_some()); } + #[test] + fn 
test_build_payload_uses_meta_when_message_is_empty() { + let payload = build_payload(&Value::String(String::new()), Some(&serde_json::json!([1, 2, 3]))); + assert_eq!(payload, serde_json::json!([1, 2, 3])); + } + + #[test] + fn test_build_payload_wraps_non_object_meta_for_named_message() { + let payload = build_payload(&Value::String("hello".into()), Some(&serde_json::json!(["x"]))); + assert_eq!( + payload, + serde_json::json!({ + "message": "hello", + "meta": ["x"], + }) + ); + } + #[test] fn test_global_log_level() { set_global_log_level(LogLevel::Debug); diff --git a/libraries/md-compiler/package.json b/libraries/md-compiler/package.json index b16e2348..95259233 100644 --- a/libraries/md-compiler/package.json +++ b/libraries/md-compiler/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/md-compiler", "type": "module", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "private": true, "description": "Rust-powered MDX→Markdown compiler for Node.js with pure-TS fallback", "license": "AGPL-3.0-only", @@ -40,7 +40,7 @@ ] }, "scripts": { - "build": "tsdown", + "build": "tsx ../../scripts/build-quiet.ts", "build:all": "run-s build:native build", "build:native": "napi build --platform --release --output-dir dist -- --features napi", "build:native:debug": "napi build --platform --output-dir dist -- --features napi", diff --git a/libraries/md-compiler/src/expression_eval.rs b/libraries/md-compiler/src/expression_eval.rs index 8011abad..5054d4bb 100644 --- a/libraries/md-compiler/src/expression_eval.rs +++ b/libraries/md-compiler/src/expression_eval.rs @@ -7,8 +7,8 @@ //! - Equality comparisons: `{os.platform === "win32"}` //! - Boolean literals: `{true}`, `{false}` -use std::collections::HashMap; use serde_json::Value; +use std::collections::HashMap; /// Evaluation scope — a map of variable names to their values. 
pub type EvaluationScope = HashMap; @@ -66,7 +66,9 @@ fn is_simple_reference(s: &str) -> bool { // First char must be letter, underscore, or $ match chars.peek() { - Some(c) if c.is_ascii_alphabetic() || *c == '_' || *c == '$' => { chars.next(); } + Some(c) if c.is_ascii_alphabetic() || *c == '_' || *c == '$' => { + chars.next(); + } _ => return false, } @@ -85,19 +87,29 @@ fn evaluate_simple_reference(reference: &str, scope: &EvaluationScope) -> Result let parts: Vec<&str> = reference.split('.').collect(); let root_var = parts[0]; - let root_value = scope.get(root_var) - .ok_or_else(|| format!("Undefined namespace: \"{}\" in expression \"{}\"", root_var, reference))?; + let root_value = scope.get(root_var).ok_or_else(|| { + format!( + "Undefined namespace: \"{}\" in expression \"{}\"", + root_var, reference + ) + })?; let mut value = root_value.clone(); for &prop in &parts[1..] { match &value { Value::Object(map) => { - value = map.get(prop) - .cloned() - .ok_or_else(|| format!("Undefined variable: \"{}\" in expression \"{}\"", prop, reference))?; + value = map.get(prop).cloned().ok_or_else(|| { + format!( + "Undefined variable: \"{}\" in expression \"{}\"", + prop, reference + ) + })?; } Value::Null => { - return Err(format!("Cannot read property \"{}\" of null in expression \"{}\"", prop, reference)); + return Err(format!( + "Cannot read property \"{}\" of null in expression \"{}\"", + prop, reference + )); } _ => { return Err(format!( @@ -203,7 +215,12 @@ fn find_operator(s: &str, op: char) -> Option { '`' if !in_single_quote && !in_double_quote => in_backtick = !in_backtick, '(' | '{' | '[' if !in_single_quote && !in_double_quote && !in_backtick => depth += 1, ')' | '}' | ']' if !in_single_quote && !in_double_quote && !in_backtick => depth -= 1, - c2 if c2 == op && depth == 0 && !in_single_quote && !in_double_quote && !in_backtick => { + c2 if c2 == op + && depth == 0 + && !in_single_quote + && !in_double_quote + && !in_backtick => + { return Some(i); } _ => 
{} @@ -250,7 +267,10 @@ mod tests { fn make_scope() -> EvaluationScope { let mut scope = EvaluationScope::new(); scope.insert("os".into(), json!({"platform": "win32", "arch": "x64"})); - scope.insert("profile".into(), json!({"name": "TrueNine", "username": "truenine"})); + scope.insert( + "profile".into(), + json!({"name": "TrueNine", "username": "truenine"}), + ); scope.insert("tool".into(), json!({"name": "cursor"})); scope } @@ -259,7 +279,10 @@ mod tests { fn test_simple_reference() { let scope = make_scope(); assert_eq!(evaluate_expression("os.platform", &scope).unwrap(), "win32"); - assert_eq!(evaluate_expression("profile.name", &scope).unwrap(), "TrueNine"); + assert_eq!( + evaluate_expression("profile.name", &scope).unwrap(), + "TrueNine" + ); assert_eq!(evaluate_expression("tool.name", &scope).unwrap(), "cursor"); } @@ -300,11 +323,16 @@ mod tests { fn test_ternary() { let scope = make_scope(); assert_eq!( - evaluate_expression("os.platform === \"win32\" ? \"windows\" : \"other\"", &scope).unwrap(), + evaluate_expression( + "os.platform === \"win32\" ? \"windows\" : \"other\"", + &scope + ) + .unwrap(), "windows" ); assert_eq!( - evaluate_expression("os.platform === \"linux\" ? \"linux\" : \"other\"", &scope).unwrap(), + evaluate_expression("os.platform === \"linux\" ? 
\"linux\" : \"other\"", &scope) + .unwrap(), "other" ); } @@ -312,9 +340,18 @@ mod tests { #[test] fn test_equality() { let scope = make_scope(); - assert_eq!(evaluate_expression("os.platform === \"win32\"", &scope).unwrap(), "true"); - assert_eq!(evaluate_expression("os.platform !== \"win32\"", &scope).unwrap(), "false"); - assert_eq!(evaluate_expression("os.platform === \"linux\"", &scope).unwrap(), "false"); + assert_eq!( + evaluate_expression("os.platform === \"win32\"", &scope).unwrap(), + "true" + ); + assert_eq!( + evaluate_expression("os.platform !== \"win32\"", &scope).unwrap(), + "false" + ); + assert_eq!( + evaluate_expression("os.platform === \"linux\"", &scope).unwrap(), + "false" + ); } #[test] diff --git a/libraries/md-compiler/src/index.ts b/libraries/md-compiler/src/index.ts index d9f67ccd..898fe6f2 100644 --- a/libraries/md-compiler/src/index.ts +++ b/libraries/md-compiler/src/index.ts @@ -24,13 +24,10 @@ export { evaluateJsxExpression, hasJsxInEstree } from './compiler/jsx-expression-eval' -export { - mdxToMd -} from './compiler/mdx-to-md' - export { parseMdx } from './compiler/parser' + export type { ComponentHandler, EvaluationScope, @@ -50,3 +47,6 @@ export { MdLineHandler, registerBuiltInComponents } from './components' +export { + mdxToMd +} from './mdx-to-md' diff --git a/libraries/md-compiler/src/lib.rs b/libraries/md-compiler/src/lib.rs index 00011ea2..0d616034 100644 --- a/libraries/md-compiler/src/lib.rs +++ b/libraries/md-compiler/src/lib.rs @@ -6,19 +6,15 @@ //! with custom expression evaluation, JSX component processing, and //! AST-to-Markdown serialization. 
-pub mod parser; pub mod expression_eval; +pub mod mdx_to_md; +pub mod parser; pub mod serializer; pub mod transformer; -pub mod mdx_to_md; pub use expression_eval::EvaluationScope; pub use mdx_to_md::{ - ExportMetadata, - MdxGlobalScope, - MdxToMdOptions, - MdxToMdResult, - mdx_to_md, + ExportMetadata, MdxGlobalScope, MdxToMdOptions, MdxToMdResult, mdx_to_md, mdx_to_md_with_metadata, }; pub use parser::parse_mdx; @@ -31,9 +27,14 @@ pub use transformer::ProcessingContext; #[cfg(feature = "napi")] mod napi_binding { + use std::collections::HashMap; + + use super::{ + EvaluationScope, MdxGlobalScope, MdxToMdOptions, mdx_to_md, mdx_to_md_with_metadata, + }; use napi_derive::napi; + use serde::Deserialize; use serde_json::Value; - use super::{mdx_to_md, MdxToMdOptions, EvaluationScope}; #[napi(object)] pub struct ParsedMarkdown { @@ -42,140 +43,207 @@ mod napi_binding { pub content_without_front_matter: String, } -// --------------------------------------------------------------------------- -// mdxToMd — convert MDX source to plain Markdown -// --------------------------------------------------------------------------- - -/// Convert MDX source to plain Markdown. -/// Returns the converted Markdown string, or throws on parse error. -#[napi] -pub fn mdx_to_md_str(content: String) -> napi::Result { - mdx_to_md(&content, None).map_err(|e| napi::Error::from_reason(e.to_string())) -} - -/// Convert MDX source to plain Markdown with a JSON scope string. -/// `scope_json` should be a JSON object string, e.g. `{"os":{"platform":"win32"}}`. 
-#[napi] -pub fn mdx_to_md_with_scope(content: String, scope_json: String) -> napi::Result { - let scope: EvaluationScope = - serde_json::from_str(&scope_json).map_err(|e| napi::Error::from_reason(e.to_string()))?; - let opts = MdxToMdOptions { - scope: Some(scope), - ..Default::default() - }; - mdx_to_md(&content, Some(opts)).map_err(|e| napi::Error::from_reason(e.to_string())) -} - -// --------------------------------------------------------------------------- -// buildFrontMatter / buildMarkdownWithFrontMatter -// --------------------------------------------------------------------------- - -/// Build a YAML front matter block from a JSON object string. -/// Returns a string like `---\nkey: value\n---`. -#[napi] -pub fn build_front_matter(front_matter_json: String) -> napi::Result { - let obj: Value = - serde_json::from_str(&front_matter_json).map_err(|e| napi::Error::from_reason(e.to_string()))?; - - let map = match &obj { - Value::Object(m) => m, - _ => return Err(napi::Error::from_reason("frontMatter must be a JSON object")), - }; + #[derive(Debug, Default, Deserialize)] + #[serde(rename_all = "camelCase")] + struct CompileMdxToMdOptions { + #[serde(default)] + scope: Option, + #[serde(default)] + base_path: Option, + #[serde(default)] + global_scope: Option, + #[serde(default)] + extract_metadata: bool, + } - let cleaned: serde_json::Map = map - .iter() - .filter(|(_, v)| !v.is_null()) - .map(|(k, v)| (k.clone(), v.clone())) - .collect(); + fn parse_compile_options(options_json: Option) -> napi::Result { + let parsed = match options_json { + None => CompileMdxToMdOptions::default(), + Some(json) => serde_json::from_str::(&json) + .map_err(|e| napi::Error::from_reason(e.to_string()))?, + }; - if cleaned.is_empty() { - return Ok("---\n---".to_string()); + Ok(MdxToMdOptions { + scope: parsed.scope, + base_path: parsed.base_path, + global_scope: parsed.global_scope, + extract_metadata: parsed.extract_metadata, + }) } - let yaml_str = 
serde_yml::to_string(&Value::Object(cleaned)) - .map_err(|e| napi::Error::from_reason(e.to_string()))?; - let yaml_trimmed = yaml_str.trim_end(); - Ok(format!("---\n{yaml_trimmed}\n---")) -} - -/// Build a Markdown string with YAML front matter prepended. -/// `front_matter_json` is a JSON object string; pass `"null"` or `"{}"` to skip. -#[napi] -pub fn build_markdown_with_front_matter( - front_matter_json: Option, - content: String, -) -> napi::Result { - let fm = match front_matter_json { - None => return Ok(content), - Some(ref s) if s == "null" || s == "{}" => return Ok(content), - Some(ref s) => s, - }; + // --------------------------------------------------------------------------- + // mdxToMd — convert MDX source to plain Markdown + // --------------------------------------------------------------------------- - let obj: Value = - serde_json::from_str(fm).map_err(|e| napi::Error::from_reason(e.to_string()))?; + /// Convert MDX source to plain Markdown. + /// Returns the converted Markdown string, or throws on parse error. + #[napi] + pub fn mdx_to_md_str(content: String) -> napi::Result { + mdx_to_md(&content, None).map_err(|e| napi::Error::from_reason(e.to_string())) + } - match &obj { - Value::Null => return Ok(content), - Value::Object(m) if m.is_empty() => return Ok(content), - _ => {} + /// Convert MDX source to plain Markdown with a JSON scope string. + /// `scope_json` should be a JSON object string, e.g. `{"os":{"platform":"win32"}}`. 
+ #[napi] + pub fn mdx_to_md_with_scope(content: String, scope_json: String) -> napi::Result { + let scope: EvaluationScope = serde_json::from_str(&scope_json) + .map_err(|e| napi::Error::from_reason(e.to_string()))?; + let opts = MdxToMdOptions { + scope: Some(scope), + ..Default::default() + }; + mdx_to_md(&content, Some(opts)).map_err(|e| napi::Error::from_reason(e.to_string())) } - let fm_block = build_front_matter(fm.to_string())?; - Ok(format!("{fm_block}\n{content}")) -} - -// --------------------------------------------------------------------------- -// parseMarkdown — extract front matter + content -// --------------------------------------------------------------------------- - -/// Parse a Markdown/MDX string and extract YAML front matter and content. -#[napi] -pub fn parse_markdown(raw_content: String) -> ParsedMarkdown { - let front_matter_regex = regex_lite::Regex::new(r"(?s)^---\r?\n(.*?)\r?\n---\r?\n?").ok(); - - if let Some(re) = &front_matter_regex { - if let Some(caps) = re.captures(&raw_content) { - let raw_fm = caps.get(1).map(|m| m.as_str().to_string()); - let full_match = caps.get(0).map(|m| m.end()).unwrap_or(0); - let content_without = raw_content[full_match..].to_string(); - - let yaml_json = raw_fm.as_deref().and_then(|fm| { - serde_yml::from_str::(fm) - .ok() - .and_then(|v| serde_json::to_string(&v).ok()) - }); - - return ParsedMarkdown { - yaml_front_matter_json: yaml_json, - raw_front_matter: raw_fm, - content_without_front_matter: content_without, - }; - } + /// Compile MDX source with JSON options and return a JSON result payload. 
+ #[napi] + pub fn compile_mdx_to_md( + content: String, + options_json: Option, + ) -> napi::Result { + let options = parse_compile_options(options_json)?; + + let result = if options.extract_metadata { + let compiled = mdx_to_md_with_metadata(&content, Some(options)) + .map_err(|e| napi::Error::from_reason(e.to_string()))?; + + serde_json::json!({ + "content": compiled.content, + "metadata": { + "fields": compiled.metadata.exports, + "source": compiled.metadata.source.as_str(), + }, + }) + } else { + let compiled = mdx_to_md(&content, Some(options)) + .map_err(|e| napi::Error::from_reason(e.to_string()))?; + + serde_json::json!({ + "content": compiled, + }) + }; + + serde_json::to_string(&result).map_err(|e| napi::Error::from_reason(e.to_string())) } - ParsedMarkdown { - yaml_front_matter_json: None, - raw_front_matter: None, - content_without_front_matter: raw_content, + // --------------------------------------------------------------------------- + // buildFrontMatter / buildMarkdownWithFrontMatter + // --------------------------------------------------------------------------- + + /// Build a YAML front matter block from a JSON object string. + /// Returns a string like `---\nkey: value\n---`. 
+ #[napi] + pub fn build_front_matter(front_matter_json: String) -> napi::Result { + let obj: Value = serde_json::from_str(&front_matter_json) + .map_err(|e| napi::Error::from_reason(e.to_string()))?; + + let map = match &obj { + Value::Object(m) => m, + _ => { + return Err(napi::Error::from_reason( + "frontMatter must be a JSON object", + )); + } + }; + + let cleaned: serde_json::Map = map + .iter() + .filter(|(_, v)| !v.is_null()) + .map(|(k, v)| (k.clone(), v.clone())) + .collect(); + + if cleaned.is_empty() { + return Ok("---\n---".to_string()); + } + + let yaml_str = serde_yml::to_string(&Value::Object(cleaned)) + .map_err(|e| napi::Error::from_reason(e.to_string()))?; + let yaml_trimmed = yaml_str.trim_end(); + Ok(format!("---\n{yaml_trimmed}\n---")) } -} - -/// Transform MDX-style link/image references to plain .md extensions. -#[napi] -pub fn transform_mdx_references_to_md(content: String) -> String { - let re = regex_lite::Regex::new(r"(!?\[)([^\]]*?)(\]\()([^)]+)(\))").unwrap(); - re.replace_all(&content, |caps: ®ex_lite::Captures| { - let prefix = &caps[1]; - let text = caps[2].replace(".mdx", ".md"); - let middle = &caps[3]; - let url = &caps[4]; - let suffix = &caps[5]; - let transformed_url = if url.starts_with("http://") || url.starts_with("https://") || url.starts_with("//") { - url.to_string() - } else { - url.replace(".mdx", ".md") + + /// Build a Markdown string with YAML front matter prepended. + /// `front_matter_json` is a JSON object string; pass `"null"` or `"{}"` to skip. 
+ #[napi] + pub fn build_markdown_with_front_matter( + front_matter_json: Option, + content: String, + ) -> napi::Result { + let fm = match front_matter_json { + None => return Ok(content), + Some(ref s) if s == "null" || s == "{}" => return Ok(content), + Some(ref s) => s, }; - format!("{prefix}{text}{middle}{transformed_url}{suffix}") - }).into_owned() -} + + let obj: Value = + serde_json::from_str(fm).map_err(|e| napi::Error::from_reason(e.to_string()))?; + + match &obj { + Value::Null => return Ok(content), + Value::Object(m) if m.is_empty() => return Ok(content), + _ => {} + } + + let fm_block = build_front_matter(fm.to_string())?; + Ok(format!("{fm_block}\n{content}")) + } + + // --------------------------------------------------------------------------- + // parseMarkdown — extract front matter + content + // --------------------------------------------------------------------------- + + /// Parse a Markdown/MDX string and extract YAML front matter and content. + #[napi] + pub fn parse_markdown(raw_content: String) -> ParsedMarkdown { + let front_matter_regex = regex_lite::Regex::new(r"(?s)^---\r?\n(.*?)\r?\n---\r?\n?").ok(); + + if let Some(re) = &front_matter_regex { + if let Some(caps) = re.captures(&raw_content) { + let raw_fm = caps.get(1).map(|m| m.as_str().to_string()); + let full_match = caps.get(0).map(|m| m.end()).unwrap_or(0); + let content_without = raw_content[full_match..].to_string(); + + let yaml_json = raw_fm.as_deref().and_then(|fm| { + serde_yml::from_str::(fm) + .ok() + .and_then(|v| serde_json::to_string(&v).ok()) + }); + + return ParsedMarkdown { + yaml_front_matter_json: yaml_json, + raw_front_matter: raw_fm, + content_without_front_matter: content_without, + }; + } + } + + ParsedMarkdown { + yaml_front_matter_json: None, + raw_front_matter: None, + content_without_front_matter: raw_content, + } + } + + /// Transform MDX-style link/image references to plain .md extensions. 
+ #[napi] + pub fn transform_mdx_references_to_md(content: String) -> String { + let re = regex_lite::Regex::new(r"(!?\[)([^\]]*?)(\]\()([^)]+)(\))").unwrap(); + re.replace_all(&content, |caps: ®ex_lite::Captures| { + let prefix = &caps[1]; + let text = caps[2].replace(".mdx", ".md"); + let middle = &caps[3]; + let url = &caps[4]; + let suffix = &caps[5]; + let transformed_url = if url.starts_with("http://") + || url.starts_with("https://") + || url.starts_with("//") + { + url.to_string() + } else { + url.replace(".mdx", ".md") + }; + format!("{prefix}{text}{middle}{transformed_url}{suffix}") + }) + .into_owned() + } } // mod napi_binding diff --git a/libraries/md-compiler/src/markdown/index.ts b/libraries/md-compiler/src/markdown/index.ts index 67f78a99..02d3a05a 100644 --- a/libraries/md-compiler/src/markdown/index.ts +++ b/libraries/md-compiler/src/markdown/index.ts @@ -100,6 +100,24 @@ export function buildRawFrontMatter( singleQuote: options?.singleQuote ?? false, lineWidth: options?.lineWidth ?? 0 }).trimEnd() +} + +export function wrapRawFrontMatter(rawYamlContent: string): string { + const trimmed = rawYamlContent.trim() + if (trimmed.length === 0) return '---\n---' + return `---\n${trimmed}\n---` +} + +/** + * Builds complete markdown content with raw (pre-serialized) front matter. + * Use this when you have pre-serialized YAML that should not be re-parsed. 
+ */ +export function buildMarkdownWithRawFrontMatter( + rawFrontMatter: string, + content: string +): string { + const wrapped = wrapRawFrontMatter(rawFrontMatter) + return `${wrapped}\n${content}` } // doubleQuoted — TS only (YAML-specific helper) export function doubleQuoted(value: string): unknown { diff --git a/libraries/md-compiler/src/markdown/markdown.test.ts b/libraries/md-compiler/src/markdown/markdown.test.ts index 4850d4fe..a53aa628 100644 --- a/libraries/md-compiler/src/markdown/markdown.test.ts +++ b/libraries/md-compiler/src/markdown/markdown.test.ts @@ -34,7 +34,10 @@ tags: describe('buildFrontMatter', () => { it('should build front matter with simple values', () => { const result = buildFrontMatter({name: 'test', description: 'A test'}) - expect(result).toBe('---\nname: test\ndescription: A test\n---') + expect(result.startsWith('---\n')).toBe(true) + expect(result.endsWith('\n---')).toBe(true) + const parsed = parseMarkdown(`${result}\n`) + expect(parsed.yamlFrontMatter).toEqual({name: 'test', description: 'A test'}) }) it('should build front matter with array values', () => { @@ -61,9 +64,10 @@ tags: const result = buildFrontMatter({ metadata: {version: '1.0', author: 'Test'} }) - expect(result).toContain('metadata:') - expect(result).toContain('version: "1.0"') - expect(result).toContain('author: Test') + const parsed = parseMarkdown(`${result}\n`) + expect(parsed.yamlFrontMatter).toEqual({ + metadata: {version: '1.0', author: 'Test'} + }) }) }) diff --git a/libraries/md-compiler/src/mdx-to-md.ts b/libraries/md-compiler/src/mdx-to-md.ts new file mode 100644 index 00000000..f07cd75e --- /dev/null +++ b/libraries/md-compiler/src/mdx-to-md.ts @@ -0,0 +1,133 @@ +import type {ExportMetadata, MetadataSource} from './compiler/export-parser' +import type {MdxToMdOptions, MdxToMdResult} from './compiler/types' +import {createRequire} from 'node:module' +import process from 'node:process' +import {mdxToMd as fallbackMdxToMd} from './compiler/mdx-to-md' + 
+interface NapiMdCompilerModule { + compileMdxToMd: (content: string, optionsJson?: string | null) => string +} + +type NativeCompileMetadata = ExportMetadata & { + readonly source: MetadataSource +} + +interface NativeCompileResult { + readonly content: string + readonly metadata?: NativeCompileMetadata +} + +let napiBinding: NapiMdCompilerModule | null = null + +try { + const require = createRequire(import.meta.url) + const {platform, arch} = process + const platforms: Record = { + 'win32-x64': ['napi-md-compiler.win32-x64-msvc', 'win32-x64-msvc'], + 'linux-x64': ['napi-md-compiler.linux-x64-gnu', 'linux-x64-gnu'], + 'linux-arm64': ['napi-md-compiler.linux-arm64-gnu', 'linux-arm64-gnu'], + 'darwin-arm64': ['napi-md-compiler.darwin-arm64', 'darwin-arm64'], + 'darwin-x64': ['napi-md-compiler.darwin-x64', 'darwin-x64'] + } + const entry = platforms[`${platform}-${arch}`] + if (entry != null) { + const [local, suffix] = entry + try { + napiBinding = require(`./${local}.node`) as NapiMdCompilerModule + } + catch { + try { + const pkg = require(`@truenine/memory-sync-cli-${suffix}`) as Record + napiBinding = pkg['mdCompiler'] as NapiMdCompilerModule + } + catch {} + } + } +} +catch {} + +export async function mdxToMd( + content: string, + options?: MdxToMdOptions & {extractMetadata?: false} +): Promise + +export async function mdxToMd( + content: string, + options: MdxToMdOptions & {extractMetadata: true} +): Promise + +export async function mdxToMd( + content: string, + options?: MdxToMdOptions +): Promise { + const metadataOptions + = options?.extractMetadata === true + ? 
{ + ...options, + extractMetadata: true + } satisfies MdxToMdOptions & {extractMetadata: true} + : null + + const nativeResult = tryNativeCompile(content, options) + if (nativeResult != null) { + if (metadataOptions != null) { + const {metadata} = nativeResult + if (metadata == null) return fallbackMdxToMd(content, metadataOptions) + + return { + content: nativeResult.content, + metadata + } + } + + return nativeResult.content + } + + if (metadataOptions != null) return fallbackMdxToMd(content, metadataOptions) + + if (options == null) return fallbackMdxToMd(content) + + const fallbackOptions: MdxToMdOptions & {extractMetadata: false} = { + ...options, + extractMetadata: false + } + + return fallbackMdxToMd(content, fallbackOptions) +} + +function tryNativeCompile( + content: string, + options?: MdxToMdOptions +): NativeCompileResult | null { + if (napiBinding == null) return null + + try { + const raw = napiBinding.compileMdxToMd(content, serializeOptions(options)) + const result = JSON.parse(raw) as NativeCompileResult + if (options?.extractMetadata === true && result.metadata == null) return null + return result + } + catch { + return null + } +} + +function serializeOptions(options?: MdxToMdOptions): string | null { + if (options == null) return null + + const normalized = { + ...options, + ...options.globalScope != null + ? { + globalScope: { + os: options.globalScope.os, + env: options.globalScope.env, + profile: options.globalScope.profile, + tool: options.globalScope.tool + } + } + : {} + } + + return JSON.stringify(normalized) +} diff --git a/libraries/md-compiler/src/mdx_to_md.rs b/libraries/md-compiler/src/mdx_to_md.rs index de1921a0..b056f877 100644 --- a/libraries/md-compiler/src/mdx_to_md.rs +++ b/libraries/md-compiler/src/mdx_to_md.rs @@ -3,8 +3,9 @@ //! Parses MDX source, transforms the AST (evaluating expressions, expanding components), //! and serializes back to Markdown. 
-use std::collections::HashMap; +use serde::{Deserialize, Serialize}; use serde_json::Value; +use std::collections::HashMap; use crate::expression_eval::EvaluationScope; use crate::parser::parse_mdx; @@ -12,7 +13,8 @@ use crate::serializer::serialize; use crate::transformer::{ProcessingContext, transform_ast}; /// Global scope for MDX compilation (os, env, profile, tool info). -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct MdxGlobalScope { pub os: Option>, pub env: Option>, @@ -21,7 +23,8 @@ pub struct MdxGlobalScope { } /// Options for the `mdx_to_md` function. -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct MdxToMdOptions { pub scope: Option, pub base_path: Option, @@ -36,11 +39,32 @@ pub struct MdxToMdResult { pub metadata: ExportMetadata, } +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum MetadataSource { + Export, + #[default] + Yaml, + Mixed, +} + +impl MetadataSource { + pub fn as_str(self) -> &'static str { + match self { + Self::Export => "export", + Self::Yaml => "yaml", + Self::Mixed => "mixed", + } + } +} + /// Extracted metadata from YAML frontmatter and export statements. -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct ExportMetadata { pub yaml_front_matter: Option>, pub exports: HashMap, + pub source: MetadataSource, } /// Merge global scope with custom scope. Custom scope takes priority. 
@@ -55,20 +79,31 @@ fn merge_scopes( result.insert("os".into(), serde_json::to_value(os).unwrap_or(Value::Null)); } if let Some(env) = &gs.env { - result.insert("env".into(), serde_json::to_value(env).unwrap_or(Value::Null)); + result.insert( + "env".into(), + serde_json::to_value(env).unwrap_or(Value::Null), + ); } if let Some(profile) = &gs.profile { - result.insert("profile".into(), serde_json::to_value(profile).unwrap_or(Value::Null)); + result.insert( + "profile".into(), + serde_json::to_value(profile).unwrap_or(Value::Null), + ); } if let Some(tool) = &gs.tool { - result.insert("tool".into(), serde_json::to_value(tool).unwrap_or(Value::Null)); + result.insert( + "tool".into(), + serde_json::to_value(tool).unwrap_or(Value::Null), + ); } } if let Some(cs) = custom_scope { for (key, value) in cs { // Deep merge objects, override primitives - if let (Some(Value::Object(existing)), Value::Object(new_map)) = (result.get(key), value) { + if let (Some(Value::Object(existing)), Value::Object(new_map)) = + (result.get(key), value) + { let mut merged = existing.clone(); for (k, v) in new_map { merged.insert(k.clone(), v.clone()); @@ -107,7 +142,7 @@ fn extract_exports_from_source(source: &str) -> HashMap { for line in source.lines() { let trimmed = line.trim(); - if !trimmed.starts_with("export ") { + if !is_supported_export_metadata_line(trimmed) { continue; } @@ -126,12 +161,48 @@ fn extract_exports_from_source(source: &str) -> HashMap { exports } +fn is_supported_export_metadata_line(trimmed: &str) -> bool { + trimmed.starts_with("export const ") +} + +fn strip_supported_export_lines(source: &str) -> String { + let mut stripped = String::new(); + let mut skip_blank_line = false; + + for line in source.lines() { + let trimmed = line.trim(); + if is_supported_export_metadata_line(trimmed) { + skip_blank_line = true; + continue; + } + + if skip_blank_line && trimmed.is_empty() { + continue; + } + + skip_blank_line = false; + stripped.push_str(line); + 
stripped.push('\n'); + } + + if !source.ends_with('\n') && stripped.ends_with('\n') { + stripped.pop(); + } + + stripped +} + /// Remove YAML frontmatter and ESM export nodes from the AST. fn strip_metadata_nodes(ast: &markdown::mdast::Node) -> markdown::mdast::Node { if let markdown::mdast::Node::Root(root) = ast { - let filtered: Vec = root.children.iter() + let filtered: Vec = root + .children + .iter() .filter(|child| { - !matches!(child, markdown::mdast::Node::Yaml(_) | markdown::mdast::Node::MdxjsEsm(_)) + !matches!( + child, + markdown::mdast::Node::Yaml(_) | markdown::mdast::Node::MdxjsEsm(_) + ) }) .cloned() .collect(); @@ -161,25 +232,37 @@ pub fn mdx_to_md_with_metadata( options: Option, ) -> Result { let opts = options.unwrap_or_default(); - let ast = parse_mdx(content)?; + let stripped_source = strip_supported_export_lines(content); + let ast = parse_mdx(&stripped_source)?; // Extract metadata let yaml_fm = extract_yaml_frontmatter(&ast); - let exports = extract_exports_from_source(content); + let mut exports = extract_exports_from_source(content); + let has_yaml_front_matter = yaml_fm + .as_ref() + .is_some_and(|front_matter| !front_matter.is_empty()); + let has_export_metadata = !exports.is_empty(); + let source = match (has_export_metadata, has_yaml_front_matter) { + (true, true) => MetadataSource::Mixed, + (true, false) => MetadataSource::Export, + _ => MetadataSource::Yaml, + }; let mut metadata = ExportMetadata { yaml_front_matter: yaml_fm.clone(), - exports, + exports: HashMap::new(), + source, }; // Merge YAML frontmatter into exports (exports take priority) if let Some(yaml) = &yaml_fm { for (k, v) in yaml { - if !metadata.exports.contains_key(k) { - metadata.exports.insert(k.clone(), v.clone()); + if !exports.contains_key(k) { + exports.insert(k.clone(), v.clone()); } } } + metadata.exports = exports; // Strip metadata nodes from AST let stripped = strip_metadata_nodes(&ast); @@ -228,7 +311,8 @@ mod tests { let result = mdx_to_md( 
"\n\nVisible\n\n\n", Some(make_options()), - ).unwrap(); + ) + .unwrap(); assert!(result.contains("Visible"), "Got: {}", result); } @@ -237,7 +321,8 @@ mod tests { let result = mdx_to_md( "\n\nHidden\n\n\n", Some(make_options()), - ).unwrap(); + ) + .unwrap(); assert!(!result.contains("Hidden"), "Got: {}", result); } @@ -248,7 +333,11 @@ mod tests { assert!(result.content.contains("# Hello")); assert!(!result.content.contains("---")); assert_eq!( - result.metadata.exports.get("description").and_then(|v| v.as_str()), + result + .metadata + .exports + .get("description") + .and_then(|v| v.as_str()), Some("test skill") ); } @@ -258,8 +347,36 @@ mod tests { let source = "export const meta = {\"name\": \"test\"}\n\n# Hello\n"; let result = mdx_to_md_with_metadata(source, Some(make_options())).unwrap(); assert!(result.content.contains("# Hello")); + assert!(!result.content.contains("export const meta")); let meta = result.metadata.exports.get("meta"); - assert!(meta.is_some(), "Expected meta export, got: {:?}", result.metadata.exports); + assert!( + meta.is_some(), + "Expected meta export, got: {:?}", + result.metadata.exports + ); + } + + #[test] + fn test_supported_export_lines_are_removed_from_compiled_content() { + let source = "---\ndescription: dist\n---\nexport const x = 1\n\nCommand dist\n"; + let result = mdx_to_md_with_metadata(source, Some(make_options())).unwrap(); + assert_eq!(result.content, "Command dist"); + assert_eq!( + result + .metadata + .exports + .get("x") + .and_then(|value| value.as_i64()), + Some(1) + ); + assert_eq!( + result + .metadata + .exports + .get("description") + .and_then(|value| value.as_str()), + Some("dist") + ); } #[test] diff --git a/libraries/md-compiler/src/parser.rs b/libraries/md-compiler/src/parser.rs index ae447c7c..91f91a32 100644 --- a/libraries/md-compiler/src/parser.rs +++ b/libraries/md-compiler/src/parser.rs @@ -2,7 +2,7 @@ //! //! Parses MDX source into an mdast AST with MDX extensions, GFM, and frontmatter. 
-use markdown::{mdast::Node, to_mdast, ParseOptions}; +use markdown::{ParseOptions, mdast::Node, to_mdast}; /// Parse an MDX string into an mdast AST. /// @@ -63,7 +63,10 @@ mod tests { let node = result.unwrap(); match &node { Node::Root(root) => { - let has_jsx = root.children.iter().any(|c| matches!(c, Node::MdxJsxFlowElement(_))); + let has_jsx = root + .children + .iter() + .any(|c| matches!(c, Node::MdxJsxFlowElement(_))); assert!(has_jsx, "Expected MdxJsxFlowElement"); } _ => panic!("Expected Root node"), diff --git a/libraries/md-compiler/src/serializer.rs b/libraries/md-compiler/src/serializer.rs index 6cab7253..8d2fa4c8 100644 --- a/libraries/md-compiler/src/serializer.rs +++ b/libraries/md-compiler/src/serializer.rs @@ -267,7 +267,11 @@ fn serialize_table(table: &markdown::mdast::Table, out: &mut String) { // Separator row out.push('|'); for (i, _) in header.children.iter().enumerate() { - let align = table.align.get(i).copied().unwrap_or(markdown::mdast::AlignKind::None); + let align = table + .align + .get(i) + .copied() + .unwrap_or(markdown::mdast::AlignKind::None); match align { markdown::mdast::AlignKind::Left => out.push_str(" :--- |"), markdown::mdast::AlignKind::Right => out.push_str(" ---: |"), diff --git a/libraries/md-compiler/src/transformer.rs b/libraries/md-compiler/src/transformer.rs index bd33c81a..36cb7071 100644 --- a/libraries/md-compiler/src/transformer.rs +++ b/libraries/md-compiler/src/transformer.rs @@ -3,16 +3,17 @@ //! Walks the mdast AST, evaluating expressions, expanding components, //! and converting JSX elements to Markdown equivalents. 
-use std::collections::HashMap; -use markdown::mdast::*; use crate::expression_eval::{EvaluationScope, evaluate_expression}; +use markdown::mdast::*; +use std::collections::HashMap; // --------------------------------------------------------------------------- // Processing context // --------------------------------------------------------------------------- /// Component handler function type. -pub type ComponentHandler = Box Vec + Send + Sync>; +pub type ComponentHandler = + Box Vec + Send + Sync>; /// Processing context passed through the AST transformation. pub struct ProcessingContext { @@ -41,27 +42,33 @@ impl ProcessingContext { fn register_built_in_components(ctx: &mut ProcessingContext) { // — conditional block wrapper - ctx.components.insert("Md".to_string(), Box::new(|element, ctx| { - if !evaluate_when_condition(element, ctx) { - return vec![]; - } - transform_children(&element.children, ctx) - })); + ctx.components.insert( + "Md".to_string(), + Box::new(|element, ctx| { + if !evaluate_when_condition(element, ctx) { + return vec![]; + } + transform_children(&element.children, ctx) + }), + ); // — conditional inline text - ctx.components.insert("Md.Line".to_string(), Box::new(|element, ctx| { - if !evaluate_when_condition(element, ctx) { - return vec![]; - } - let text = extract_text_content(&element.children, &ctx.scope); - if text.is_empty() { - return vec![]; - } - vec![Node::Text(Text { - value: text, - position: None, - })] - })); + ctx.components.insert( + "Md.Line".to_string(), + Box::new(|element, ctx| { + if !evaluate_when_condition(element, ctx) { + return vec![]; + } + let text = extract_text_content(&element.children, &ctx.scope); + if text.is_empty() { + return vec![]; + } + vec![Node::Text(Text { + value: text, + position: None, + })] + }), + ); } /// Evaluate the `when` attribute of a JSX element. 
@@ -135,7 +142,10 @@ fn extract_text_content(children: &[Node], scope: &EvaluationScope) -> String { // JSX to Markdown conversion (for HTML-like elements) // --------------------------------------------------------------------------- -fn convert_jsx_to_markdown(element: &MdxJsxFlowElement, ctx: &ProcessingContext) -> Option> { +fn convert_jsx_to_markdown( + element: &MdxJsxFlowElement, + ctx: &ProcessingContext, +) -> Option> { let name = element.name.as_deref()?.to_lowercase(); match name.as_str() { "pre" => convert_pre_element(element, ctx), @@ -148,7 +158,10 @@ fn convert_jsx_to_markdown(element: &MdxJsxFlowElement, ctx: &ProcessingContext) } } -fn convert_jsx_text_to_markdown(element: &MdxJsxTextElement, ctx: &ProcessingContext) -> Option> { +fn convert_jsx_text_to_markdown( + element: &MdxJsxTextElement, + ctx: &ProcessingContext, +) -> Option> { let name = element.name.as_deref()?.to_lowercase(); match name.as_str() { "a" => convert_link_text_element(element, ctx), @@ -158,7 +171,11 @@ fn convert_jsx_text_to_markdown(element: &MdxJsxTextElement, ctx: &ProcessingCon } } -fn get_attribute_value(attrs: &[AttributeContent], name: &str, scope: &EvaluationScope) -> Option { +fn get_attribute_value( + attrs: &[AttributeContent], + name: &str, + scope: &EvaluationScope, +) -> Option { for attr in attrs { if let AttributeContent::Property(prop) = attr { if prop.name == name { @@ -177,14 +194,17 @@ fn get_attribute_value(attrs: &[AttributeContent], name: &str, scope: &Evaluatio fn convert_pre_element(element: &MdxJsxFlowElement, ctx: &ProcessingContext) -> Option> { // Find child - let code_child = element.children.iter().find_map(|child| { - match child { - Node::MdxJsxFlowElement(el) if el.name.as_deref().map(|n| n.to_lowercase()) == Some("code".into()) => Some(el), - _ => None, + let code_child = element.children.iter().find_map(|child| match child { + Node::MdxJsxFlowElement(el) + if el.name.as_deref().map(|n| n.to_lowercase()) == Some("code".into()) => + { + 
Some(el) } + _ => None, })?; - let class_name = get_attribute_value(&code_child.attributes, "className", &ctx.scope).unwrap_or_default(); + let class_name = + get_attribute_value(&code_child.attributes, "className", &ctx.scope).unwrap_or_default(); let lang = regex_extract_lang(&class_name); let code_text = extract_text_content(&code_child.children, &ctx.scope); @@ -200,7 +220,9 @@ fn regex_extract_lang(class_name: &str) -> Option<&str> { // Match "language-xxx" if let Some(start) = class_name.find("language-") { let rest = &class_name[start + 9..]; - let end = rest.find(|c: char| !c.is_ascii_alphanumeric() && c != '-' && c != '_').unwrap_or(rest.len()); + let end = rest + .find(|c: char| !c.is_ascii_alphanumeric() && c != '-' && c != '_') + .unwrap_or(rest.len()); if end > 0 { return Some(&rest[..end]); } @@ -210,74 +232,116 @@ fn regex_extract_lang(class_name: &str) -> Option<&str> { fn convert_link_element(element: &MdxJsxFlowElement, ctx: &ProcessingContext) -> Option> { let href = get_attribute_value(&element.attributes, "href", &ctx.scope)?; - if href.is_empty() { return None; } + if href.is_empty() { + return None; + } let text = extract_text_content(&element.children, &ctx.scope); let title = get_attribute_value(&element.attributes, "title", &ctx.scope); Some(vec![Node::Paragraph(Paragraph { children: vec![Node::Link(Link { url: href, title, - children: vec![Node::Text(Text { value: text, position: None })], + children: vec![Node::Text(Text { + value: text, + position: None, + })], position: None, })], position: None, })]) } -fn convert_link_text_element(element: &MdxJsxTextElement, ctx: &ProcessingContext) -> Option> { +fn convert_link_text_element( + element: &MdxJsxTextElement, + ctx: &ProcessingContext, +) -> Option> { let href = get_attribute_value(&element.attributes, "href", &ctx.scope)?; - if href.is_empty() { return None; } + if href.is_empty() { + return None; + } let text = extract_text_content(&element.children, &ctx.scope); let title = 
get_attribute_value(&element.attributes, "title", &ctx.scope); Some(vec![Node::Link(Link { url: href, title, - children: vec![Node::Text(Text { value: text, position: None })], + children: vec![Node::Text(Text { + value: text, + position: None, + })], position: None, })]) } -fn convert_strong_element(element: &MdxJsxFlowElement, ctx: &ProcessingContext) -> Option> { +fn convert_strong_element( + element: &MdxJsxFlowElement, + ctx: &ProcessingContext, +) -> Option> { let text = extract_text_content(&element.children, &ctx.scope); Some(vec![Node::Paragraph(Paragraph { children: vec![Node::Strong(Strong { - children: vec![Node::Text(Text { value: text, position: None })], + children: vec![Node::Text(Text { + value: text, + position: None, + })], position: None, })], position: None, })]) } -fn convert_strong_text_element(element: &MdxJsxTextElement, ctx: &ProcessingContext) -> Option> { +fn convert_strong_text_element( + element: &MdxJsxTextElement, + ctx: &ProcessingContext, +) -> Option> { let text = extract_text_content(&element.children, &ctx.scope); Some(vec![Node::Strong(Strong { - children: vec![Node::Text(Text { value: text, position: None })], + children: vec![Node::Text(Text { + value: text, + position: None, + })], position: None, })]) } -fn convert_emphasis_element(element: &MdxJsxFlowElement, ctx: &ProcessingContext) -> Option> { +fn convert_emphasis_element( + element: &MdxJsxFlowElement, + ctx: &ProcessingContext, +) -> Option> { let text = extract_text_content(&element.children, &ctx.scope); Some(vec![Node::Paragraph(Paragraph { children: vec![Node::Emphasis(Emphasis { - children: vec![Node::Text(Text { value: text, position: None })], + children: vec![Node::Text(Text { + value: text, + position: None, + })], position: None, })], position: None, })]) } -fn convert_emphasis_text_element(element: &MdxJsxTextElement, ctx: &ProcessingContext) -> Option> { +fn convert_emphasis_text_element( + element: &MdxJsxTextElement, + ctx: &ProcessingContext, +) -> 
Option> { let text = extract_text_content(&element.children, &ctx.scope); Some(vec![Node::Emphasis(Emphasis { - children: vec![Node::Text(Text { value: text, position: None })], + children: vec![Node::Text(Text { + value: text, + position: None, + })], position: None, })]) } -fn convert_image_element(element: &MdxJsxFlowElement, ctx: &ProcessingContext) -> Option> { +fn convert_image_element( + element: &MdxJsxFlowElement, + ctx: &ProcessingContext, +) -> Option> { let src = get_attribute_value(&element.attributes, "src", &ctx.scope)?; - if src.is_empty() { return None; } + if src.is_empty() { + return None; + } let alt = get_attribute_value(&element.attributes, "alt", &ctx.scope).unwrap_or_default(); let title = get_attribute_value(&element.attributes, "title", &ctx.scope); Some(vec![Node::Paragraph(Paragraph { @@ -291,11 +355,17 @@ fn convert_image_element(element: &MdxJsxFlowElement, ctx: &ProcessingContext) - })]) } -fn convert_blockquote_element(element: &MdxJsxFlowElement, ctx: &ProcessingContext) -> Option> { +fn convert_blockquote_element( + element: &MdxJsxFlowElement, + ctx: &ProcessingContext, +) -> Option> { let text = extract_text_content(&element.children, &ctx.scope); Some(vec![Node::Blockquote(Blockquote { children: vec![Node::Paragraph(Paragraph { - children: vec![Node::Text(Text { value: text, position: None })], + children: vec![Node::Text(Text { + value: text, + position: None, + })], position: None, })], position: None, @@ -339,7 +409,10 @@ fn transform_children(children: &[Node], ctx: &ProcessingContext) -> Vec { match evaluate_expression(&expr.value, &ctx.scope) { Ok(val) if !val.is_empty() => { result.push(Node::Paragraph(Paragraph { - children: vec![Node::Text(Text { value: val, position: None })], + children: vec![Node::Text(Text { + value: val, + position: None, + })], position: None, })); } @@ -388,18 +461,22 @@ fn transform_children(children: &[Node], ctx: &ProcessingContext) -> Vec { })); } Node::List(list) => { - let new_children: Vec 
= list.children.iter().map(|item| { - if let Node::ListItem(li) = item { - Node::ListItem(ListItem { - children: transform_children(&li.children, ctx), - position: li.position.clone(), - spread: li.spread, - checked: li.checked, - }) - } else { - item.clone() - } - }).collect(); + let new_children: Vec = list + .children + .iter() + .map(|item| { + if let Node::ListItem(li) = item { + Node::ListItem(ListItem { + children: transform_children(&li.children, ctx), + position: li.position.clone(), + spread: li.spread, + checked: li.checked, + }) + } else { + item.clone() + } + }) + .collect(); result.push(Node::List(List { children: new_children, position: list.position.clone(), @@ -411,16 +488,22 @@ fn transform_children(children: &[Node], ctx: &ProcessingContext) -> Vec { Node::Link(link) => { let new_children = transform_inline_children(&link.children, ctx); // Simplify link text that looks like file paths - let simplified = new_children.into_iter().map(|c| { - if let Node::Text(t) = &c { - if t.value.contains('/') && t.value.contains('.') { - if let Some(basename) = t.value.rsplit('/').next() { - return Node::Text(Text { value: basename.to_string(), position: t.position.clone() }); + let simplified = new_children + .into_iter() + .map(|c| { + if let Node::Text(t) = &c { + if t.value.contains('/') && t.value.contains('.') { + if let Some(basename) = t.value.rsplit('/').next() { + return Node::Text(Text { + value: basename.to_string(), + position: t.position.clone(), + }); + } } } - } - c - }).collect(); + c + }) + .collect(); result.push(Node::Link(Link { children: simplified, position: link.position.clone(), @@ -450,26 +533,34 @@ fn transform_children(children: &[Node], ctx: &ProcessingContext) -> Vec { })); } Node::Table(table) => { - let new_children: Vec = table.children.iter().map(|row| { - if let Node::TableRow(tr) = row { - let new_cells: Vec = tr.children.iter().map(|cell| { - if let Node::TableCell(tc) = cell { - Node::TableCell(TableCell { - children: 
transform_inline_children(&tc.children, ctx), - position: tc.position.clone(), + let new_children: Vec = table + .children + .iter() + .map(|row| { + if let Node::TableRow(tr) = row { + let new_cells: Vec = tr + .children + .iter() + .map(|cell| { + if let Node::TableCell(tc) = cell { + Node::TableCell(TableCell { + children: transform_inline_children(&tc.children, ctx), + position: tc.position.clone(), + }) + } else { + cell.clone() + } }) - } else { - cell.clone() - } - }).collect(); - Node::TableRow(TableRow { - children: new_cells, - position: tr.position.clone(), - }) - } else { - row.clone() - } - }).collect(); + .collect(); + Node::TableRow(TableRow { + children: new_cells, + position: tr.position.clone(), + }) + } else { + row.clone() + } + }) + .collect(); result.push(Node::Table(Table { children: new_children, position: table.position.clone(), @@ -501,11 +592,17 @@ fn transform_inline_children(children: &[Node], ctx: &ProcessingContext) -> Vec< } match evaluate_expression(&expr.value, &ctx.scope) { Ok(val) => { - result.push(Node::Text(Text { value: val, position: None })); + result.push(Node::Text(Text { + value: val, + position: None, + })); } Err(_) => { // Keep expression as-is on error - result.push(Node::Text(Text { value: String::new(), position: None })); + result.push(Node::Text(Text { + value: String::new(), + position: None, + })); } } } @@ -519,7 +616,10 @@ fn transform_inline_children(children: &[Node], ctx: &ProcessingContext) -> Vec< if evaluate_when_condition_text(element, ctx) { let text = extract_text_content(&element.children, &ctx.scope); if !text.is_empty() { - result.push(Node::Text(Text { value: text, position: None })); + result.push(Node::Text(Text { + value: text, + position: None, + })); } } } else if name == "Md" { @@ -628,13 +728,19 @@ mod tests { #[test] fn test_md_component_when_true() { - let result = compile("\n\nVisible content\n\n\n", make_scope()); + let result = compile( + "\n\nVisible content\n\n\n", + make_scope(), + 
); assert!(result.contains("Visible content"), "Got: {}", result); } #[test] fn test_md_component_when_false() { - let result = compile("\n\nHidden content\n\n\n", make_scope()); + let result = compile( + "\n\nHidden content\n\n\n", + make_scope(), + ); assert!(!result.contains("Hidden content"), "Got: {}", result); } @@ -649,16 +755,16 @@ mod tests { #[test] fn test_md_line_when_false() { - let result = compile( - "Hidden\n", - make_scope(), - ); + let result = compile("Hidden\n", make_scope()); assert!(!result.contains("Hidden"), "Got: {}", result); } #[test] fn test_passthrough_markdown() { - let result = compile("# Title\n\nParagraph text.\n\n- item 1\n- item 2\n", make_scope()); + let result = compile( + "# Title\n\nParagraph text.\n\n- item 1\n- item 2\n", + make_scope(), + ); assert!(result.contains("# Title"), "Got: {}", result); assert!(result.contains("Paragraph text"), "Got: {}", result); assert!(result.contains("- item 1"), "Got: {}", result); diff --git a/libraries/script-runtime/Cargo.toml b/libraries/script-runtime/Cargo.toml new file mode 100644 index 00000000..e848f5ca --- /dev/null +++ b/libraries/script-runtime/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "tnmsc-script-runtime" +description = "Rust-backed TypeScript proxy runtime validation for tnmsc" +version.workspace = true +edition.workspace = true +license.workspace = true +authors.workspace = true +repository.workspace = true + +[lib] +crate-type = ["rlib", "cdylib"] + +[features] +default = [] +napi = ["dep:napi", "dep:napi-derive"] + +[dependencies] +serde = { workspace = true } +serde_json = { workspace = true } +napi = { workspace = true, optional = true } +napi-derive = { workspace = true, optional = true } +tempfile = "3" +wait-timeout = "0.2" + +[build-dependencies] +napi-build = { workspace = true } diff --git a/libraries/script-runtime/build.rs b/libraries/script-runtime/build.rs new file mode 100644 index 00000000..f2be9938 --- /dev/null +++ b/libraries/script-runtime/build.rs 
@@ -0,0 +1,4 @@ +fn main() { + #[cfg(feature = "napi")] + napi_build::setup(); +} diff --git a/libraries/script-runtime/eslint.config.ts b/libraries/script-runtime/eslint.config.ts new file mode 100644 index 00000000..d1de0a15 --- /dev/null +++ b/libraries/script-runtime/eslint.config.ts @@ -0,0 +1,26 @@ +import {dirname, resolve} from 'node:path' +import {fileURLToPath} from 'node:url' + +import eslint10 from '@truenine/eslint10-config' + +const configDir = dirname(fileURLToPath(import.meta.url)) + +const config = eslint10({ + type: 'lib', + typescript: { + strictTypescriptEslint: true, + tsconfigPath: resolve(configDir, 'tsconfig.json'), + parserOptions: { + allowDefaultProject: true + } + }, + ignores: [ + '.turbo/**', + '*.md', + '**/*.md', + '**/*.toml', + '**/*.d.ts' + ] +}) + +export default config as unknown diff --git a/libraries/script-runtime/package.json b/libraries/script-runtime/package.json new file mode 100644 index 00000000..068857cf --- /dev/null +++ b/libraries/script-runtime/package.json @@ -0,0 +1,58 @@ +{ + "name": "@truenine/script-runtime", + "type": "module", + "version": "2026.10314.10606", + "private": true, + "description": "Rust-backed TypeScript proxy runtime for tnmsc", + "license": "AGPL-3.0-only", + "exports": { + "./package.json": "./package.json", + ".": { + "types": "./dist/index.d.mts", + "import": "./dist/index.mjs" + } + }, + "module": "dist/index.mjs", + "types": "dist/index.d.mts", + "files": [ + "dist" + ], + "napi": { + "binaryName": "napi-script-runtime", + "targets": [ + "x86_64-pc-windows-msvc", + "x86_64-unknown-linux-gnu", + "aarch64-unknown-linux-gnu", + "aarch64-apple-darwin", + "x86_64-apple-darwin" + ] + }, + "scripts": { + "build": "run-s build:ts build:native", + "build:all": "run-s build:ts build:native", + "build:native": "napi build --platform --release --output-dir dist -- --features napi", + "build:native:debug": "napi build --platform --output-dir dist -- --features napi", + "build:ts": "tsdown", + 
"check": "run-p typecheck lint", + "lint": "eslint --cache .", + "lintfix": "eslint --fix --cache .", + "prepublishOnly": "run-s build", + "test": "run-s test:rust test:ts", + "test:rust": "tsx ../../scripts/cargo-test.ts", + "test:ts": "vitest run --passWithNoTests", + "typecheck": "tsc --noEmit -p tsconfig.lib.json" + }, + "dependencies": { + "jiti": "2.6.1" + }, + "devDependencies": { + "@napi-rs/cli": "^3.5.1", + "@truenine/eslint10-config": "catalog:", + "@types/node": "catalog:", + "eslint": "catalog:", + "npm-run-all2": "catalog:", + "tsdown": "catalog:", + "typescript": "catalog:", + "vitest": "catalog:" + } +} diff --git a/libraries/script-runtime/src/index.test.ts b/libraries/script-runtime/src/index.test.ts new file mode 100644 index 00000000..bf12f306 --- /dev/null +++ b/libraries/script-runtime/src/index.test.ts @@ -0,0 +1,106 @@ +import type {ProxyContext} from './types' + +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import process from 'node:process' +import {afterEach, describe, expect, it} from 'vitest' +import {defineProxy} from './index' +import {loadProxyModule, resolvePublicPathModule} from './runtime-core' + +const tempDirs: string[] = [] + +function createTempDir(): string { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-script-runtime-')) + tempDirs.push(tempDir) + return tempDir +} + +function createContext(tempDir: string, command: ProxyContext['command'] = 'execute'): ProxyContext { + const workspaceDir = path.join(tempDir, 'workspace') + const aindexDir = path.join(workspaceDir, 'aindex') + fs.mkdirSync(path.join(aindexDir, 'public'), {recursive: true}) + + return { + cwd: workspaceDir, + workspaceDir, + aindexDir, + command, + platform: process.platform + } +} + +function writeProxyFile(tempDir: string, source: string): string { + const filePath = path.join(tempDir, 'workspace', 'aindex', 'public', 'proxy.ts') + fs.mkdirSync(path.dirname(filePath), {recursive: true}) + 
fs.writeFileSync(filePath, source, 'utf8') + return filePath +} + +afterEach(() => { + for (const tempDir of tempDirs.splice(0)) fs.rmSync(tempDir, {recursive: true, force: true}) +}) + +describe('runtime-core', () => { + it('loads object default exports', async () => { + const tempDir = createTempDir() + const ctx = createContext(tempDir) + const filePath = writeProxyFile( + tempDir, + 'export default { resolvePublicPath(logicalPath) { return logicalPath.replace(/^\\.git\\//, "____git/") } }\n' + ) + + const loadedModule = await loadProxyModule(filePath) + const result = await resolvePublicPathModule(filePath, ctx, '.git/info/exclude') + + expect(loadedModule.default).toBeDefined() + expect(result).toBe('____git/info/exclude') + }) + + it('loads async function exports', async () => { + const tempDir = createTempDir() + const ctx = createContext(tempDir, 'dry-run') + const filePath = writeProxyFile( + tempDir, + 'export default async (logicalPath, ctx) => ctx.command === "dry-run" ? "dry/" + logicalPath : logicalPath\n' + ) + + const result = await resolvePublicPathModule(filePath, ctx, '.vscode/settings.json') + + expect(result).toBe('dry/.vscode/settings.json') + }) + + it('skips unmatched commands', async () => { + const tempDir = createTempDir() + const ctx = createContext(tempDir, 'clean') + const filePath = writeProxyFile( + tempDir, + 'export const config = { matcher: { commands: ["execute"] } }\nexport default (logicalPath) => "shadow/" + logicalPath\n' + ) + + const result = await resolvePublicPathModule(filePath, ctx, '.editorconfig') + + expect(filePath.endsWith('proxy.ts')).toBe(true) + expect(result).toBe('.editorconfig') + }) + + it('rejects non-string path results', async () => { + const tempDir = createTempDir() + const ctx = createContext(tempDir) + const filePath = writeProxyFile(tempDir, 'export default () => ({ bad: true })\n') + + await expect(resolvePublicPathModule(filePath, ctx, '.gitignore')) + .rejects + .toThrow('proxy.ts must resolve 
public paths to a string')
+  })
+
+  it('exposes defineProxy as identity', () => {
+    const proxy = defineProxy({
+      resolvePublicPath(logicalPath: string) {
+        return logicalPath
+      }
+    })
+
+    expect(proxy.resolvePublicPath?.('.gitignore', createContext(createTempDir()))).toBe('.gitignore')
+  })
+})
diff --git a/libraries/script-runtime/src/index.ts b/libraries/script-runtime/src/index.ts
new file mode 100644
index 00000000..e54f5c8a
--- /dev/null
+++ b/libraries/script-runtime/src/index.ts
@@ -0,0 +1,189 @@
+import type {
+  ProxyContext,
+  ProxyDefinition,
+  ProxyModule,
+  ProxyModuleConfig,
+  ProxyRouteHandler,
+  ValidatePublicPathOptions
+} from './types'
+
+import * as fs from 'node:fs'
+import {createRequire} from 'node:module'
+import process from 'node:process'
+import {fileURLToPath} from 'node:url'
+import {
+  loadProxyModule as loadProxyModuleInternal,
+  resolvePublicPathModule
+} from './runtime-core'
+
+export type {
+  ProxyCommand,
+  ProxyContext,
+  ProxyDefinition,
+  ProxyMatcherConfig,
+  ProxyModule,
+  ProxyModuleConfig,
+  ProxyRouteHandler,
+  ValidatePublicPathOptions
+} from './types'
+
+interface ScriptRuntimeBinding {
+  validate_public_path?: (resolvedPath: string, aindexPublicDir: string) => string
+  validatePublicPath?: (resolvedPath: string, aindexPublicDir: string) => string
+  resolve_public_path?: (filePath: string, ctxJson: string, logicalPath: string) => string
+  resolvePublicPath?: (filePath: string, ctxJson: string, logicalPath: string) => string
+}
+
+interface PlatformBinding {
+  readonly local: string
+  readonly suffix: string
+}
+
+const PLATFORM_BINDINGS: Record<string, PlatformBinding> = {
+  'win32-x64': {local: 'napi-script-runtime.win32-x64-msvc', suffix: 'win32-x64-msvc'},
+  'linux-x64': {local: 'napi-script-runtime.linux-x64-gnu', suffix: 'linux-x64-gnu'},
+  'linux-arm64': {local: 'napi-script-runtime.linux-arm64-gnu', suffix: 'linux-arm64-gnu'},
+  'darwin-arm64': {local: 'napi-script-runtime.darwin-arm64', suffix: 'darwin-arm64'},
+  'darwin-x64': {local:
'napi-script-runtime.darwin-x64', suffix: 'darwin-x64'} +} + +let binding: ScriptRuntimeBinding | undefined, bindingLoadError: Error | undefined + +function getPlatformBinding(): PlatformBinding { + const platformBinding = PLATFORM_BINDINGS[`${process.platform}-${process.arch}`] + if (platformBinding != null) return platformBinding + + throw new Error( + `Unsupported platform for @truenine/script-runtime native binding: ${process.platform}-${process.arch}` + ) +} + +function isScriptRuntimeBinding(value: unknown): value is ScriptRuntimeBinding { + if (value == null || typeof value !== 'object') return false + const candidate = value as ScriptRuntimeBinding + return typeof candidate.validate_public_path === 'function' + || typeof candidate.validatePublicPath === 'function' + || typeof candidate.resolve_public_path === 'function' + || typeof candidate.resolvePublicPath === 'function' +} + +function formatBindingLoadError(localError: unknown, packageError: unknown, suffix: string): Error { + const localMessage = localError instanceof Error ? localError.message : String(localError) + const packageMessage = packageError instanceof Error ? packageError.message : String(packageError) + return new Error( + [ + 'Failed to load @truenine/script-runtime native binding.', + `Tried local binary "./${PLATFORM_BINDINGS[`${process.platform}-${process.arch}`]?.local ?? 'unknown'}.node" and package "@truenine/memory-sync-cli-${suffix}".`, + `Local error: ${localMessage}`, + `Package error: ${packageMessage}`, + 'Run `pnpm -F @truenine/script-runtime run build` to build the native module.' 
+    ].join('\n')
+  )
+}
+
+function loadNativeBinding(): ScriptRuntimeBinding {
+  const runtimeRequire = createRequire(import.meta.url)
+  const {local, suffix} = getPlatformBinding()
+
+  try {
+    return runtimeRequire(`./${local}.node`) as ScriptRuntimeBinding
+  }
+  catch (localError) {
+    try {
+      const cliBinaryPackage = runtimeRequire(`@truenine/memory-sync-cli-${suffix}`) as Record<string, unknown>
+      const runtimeBinding = cliBinaryPackage['scriptRuntime']
+
+      if (isScriptRuntimeBinding(runtimeBinding)) return runtimeBinding
+
+      throw new Error(`Package "@truenine/memory-sync-cli-${suffix}" does not export a scriptRuntime binding`)
+    }
+    catch (packageError) {
+      throw formatBindingLoadError(localError, packageError, suffix)
+    }
+  }
+}
+
+function getBinding(): ScriptRuntimeBinding {
+  if (binding != null) return binding
+  if (bindingLoadError != null) throw bindingLoadError
+
+  try {
+    binding = loadNativeBinding()
+    return binding
+  }
+  catch (error) {
+    bindingLoadError = error instanceof Error ? error : new Error(String(error))
+    throw bindingLoadError
+  }
+}
+
+function callValidatePublicPathBinding(resolvedPath: string, options: ValidatePublicPathOptions): string {
+  const nativeBinding = getBinding()
+  const validatePublicPathNative = nativeBinding.validate_public_path ?? nativeBinding.validatePublicPath
+
+  if (validatePublicPathNative == null) throw new Error('validate_public_path native binding is unavailable')
+
+  return validatePublicPathNative(resolvedPath, options.aindexPublicDir)
+}
+
+function callResolvePublicPathBinding(filePath: string, ctxJson: string, logicalPath: string): string {
+  const nativeBinding = getBinding()
+  const resolvePublicPathNative = nativeBinding.resolve_public_path ??
nativeBinding.resolvePublicPath
+
+  if (resolvePublicPathNative == null) throw new Error('resolve_public_path native binding is unavailable')
+
+  return resolvePublicPathNative(filePath, ctxJson, logicalPath)
+}
+
+function getWorkerPath(): string {
+  const candidatePaths: [string, string] = [
+    fileURLToPath(new URL('./resolve-proxy-worker.mjs', import.meta.url)),
+    fileURLToPath(new URL('./script-runtime-worker.mjs', import.meta.url))
+  ]
+
+  for (const candidatePath of candidatePaths) {
+    if (fs.existsSync(candidatePath)) return candidatePath
+  }
+
+  return candidatePaths[0]
+}
+
+export function defineProxy<T extends ProxyDefinition | ProxyRouteHandler>(value: T): T {
+  return value
+}
+
+export async function loadProxyModule(filePath: string): Promise<ProxyModule> {
+  return loadProxyModuleInternal(filePath)
+}
+
+export function validatePublicPath(
+  resolvedPath: string,
+  options: ValidatePublicPathOptions
+): string {
+  return callValidatePublicPathBinding(resolvedPath, options)
+}
+
+export function resolvePublicPath(
+  filePath: string,
+  ctx: ProxyContext,
+  logicalPath: string,
+  timeoutMs: number = 5_000
+): string {
+  return callResolvePublicPathBinding(filePath, JSON.stringify({
+    ...ctx,
+    workerPath: getWorkerPath(),
+    timeoutMs
+  }), logicalPath)
+}
+
+export async function resolvePublicPathUnchecked(
+  filePath: string,
+  ctx: ProxyContext,
+  logicalPath: string
+): Promise<string> {
+  return resolvePublicPathModule(filePath, ctx, logicalPath)
+}
+
+export function getProxyModuleConfig(module: ProxyModule): ProxyModuleConfig | undefined {
+  return module.config
+}
diff --git a/libraries/script-runtime/src/lib.rs b/libraries/script-runtime/src/lib.rs
new file mode 100644
index 00000000..25e7f4ad
--- /dev/null
+++ b/libraries/script-runtime/src/lib.rs
@@ -0,0 +1,294 @@
+#![deny(clippy::all)]
+
+use std::ffi::OsString;
+use std::fs;
+use std::io::Read;
+use std::path::{Component, Path, PathBuf};
+use std::process::{Command, Stdio};
+use std::time::Duration;
+
+use serde::Deserialize;
+use wait_timeout::ChildExt;
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct ResolvePublicPathContext {
+  aindex_dir: String,
+  worker_path: Option<String>,
+  timeout_ms: Option<u64>,
+}
+
+fn normalize_path(path: &Path) -> Result<PathBuf, String> {
+  let mut normalized = PathBuf::new();
+
+  for component in path.components() {
+    match component {
+      Component::Prefix(prefix) => {
+        normalized.push(prefix.as_os_str());
+      }
+      Component::RootDir => {
+        normalized.push(component.as_os_str());
+      }
+      Component::CurDir => {}
+      Component::ParentDir => {
+        if !normalized.pop() {
+          return Err(format!("Path escapes root: {}", path.display()));
+        }
+      }
+      Component::Normal(segment) => {
+        normalized.push(segment);
+      }
+    }
+  }
+
+  Ok(normalized)
+}
+
+fn absolute_base_path(path_str: &str) -> Result<PathBuf, String> {
+  let path = PathBuf::from(path_str);
+  let base_path = if path.is_absolute() {
+    path
+  } else {
+    std::env::current_dir()
+      .map_err(|error| format!("Failed to resolve current directory: {error}"))?
+      .join(path)
+  };
+
+  normalize_path(&base_path)
+}
+
+fn ensure_within_root(resolved: &Path, root: &Path, label: &str) -> Result<(), String> {
+  if resolved.starts_with(root) {
+    return Ok(());
+  }
+
+  Err(format!(
+    "{label} escapes public root: {} is not within {}",
+    resolved.display(),
+    root.display()
+  ))
+}
+
+pub fn validate_public_path_impl(
+  resolved_path: &str,
+  aindex_public_dir: &str,
+) -> Result<String, String> {
+  let trimmed_path = resolved_path.trim();
+  if trimmed_path.is_empty() {
+    return Err("Resolved public path cannot be empty".into());
+  }
+
+  let normalized_path = trimmed_path.replace('\\', "/");
+  let candidate_path = PathBuf::from(&normalized_path);
+  if candidate_path.is_absolute() {
+    return Err(format!(
+      "Resolved public path must be relative: {}",
+      candidate_path.display()
+    ));
+  }
+
+  let normalized_relative_path = normalize_path(&candidate_path)?;
+  if normalized_relative_path.as_os_str().is_empty() {
+    return Err("Resolved public path cannot be empty".into());
+  }
+
+  let
aindex_public_root = absolute_base_path(aindex_public_dir)?;
+  let normalized_absolute_path =
+    normalize_path(&aindex_public_root.join(&normalized_relative_path))?;
+  ensure_within_root(
+    &normalized_absolute_path,
+    &aindex_public_root,
+    "Resolved public path",
+  )?;
+
+  Ok(normalized_relative_path.to_string_lossy().to_string())
+}
+
+fn candidate_node_commands() -> Vec<OsString> {
+  let mut candidates: Vec<OsString> = Vec::new();
+
+  if let Some(exec_path) = std::env::var_os("npm_node_execpath") {
+    candidates.push(exec_path);
+  }
+  if let Some(exec_path) = std::env::var_os("NODE") {
+    candidates.push(exec_path);
+  }
+  if let Ok(current_exe) = std::env::current_exe() {
+    let file_name = current_exe
+      .file_name()
+      .and_then(|value| value.to_str())
+      .unwrap_or_default()
+      .to_ascii_lowercase();
+    if file_name.contains("node") {
+      candidates.push(current_exe.into_os_string());
+    }
+  }
+  candidates.push(OsString::from("node"));
+
+  candidates
+}
+
+fn find_node_command() -> Result<OsString, String> {
+  for candidate in candidate_node_commands() {
+    let status = Command::new(&candidate)
+      .arg("--version")
+      .stdout(Stdio::null())
+      .stderr(Stdio::null())
+      .status();
+
+    if status.is_ok_and(|value| value.success()) {
+      return Ok(candidate);
+    }
+  }
+
+  Err("Node.js executable was not found for resolve_public_path".into())
+}
+
+fn build_aindex_public_dir(aindex_dir: &str) -> Result<PathBuf, String> {
+  let normalized = absolute_base_path(aindex_dir)?;
+  Ok(normalize_path(&normalized.join("public"))?)
+}
+
+fn read_pipe_to_string(pipe: &mut Option<impl Read>, label: &str) -> Result<String, String> {
+  let mut buffer: Vec<u8> = Vec::new();
+
+  if let Some(reader) = pipe {
+    reader
+      .read_to_end(&mut buffer)
+      .map_err(|error| format!("Failed to read {label}: {error}"))?;
+  }
+
+  String::from_utf8(buffer).map_err(|error| format!("Invalid UTF-8 from {label}: {error}"))
+}
+
+pub fn resolve_public_path_impl(
+  file_path: &str,
+  ctx_json: &str,
+  logical_path: &str,
+) -> Result<String, String> {
+  let ctx: ResolvePublicPathContext = serde_json::from_str(ctx_json)
+    .map_err(|error| format!("Invalid resolve_public_path context JSON: {error}"))?;
+
+  let worker_path = match ctx.worker_path {
+    Some(worker_path) if !worker_path.trim().is_empty() => worker_path,
+    _ => {
+      return Err("resolve_public_path requires ctxJson.workerPath".into());
+    }
+  };
+
+  let timeout = Duration::from_millis(ctx.timeout_ms.unwrap_or(5_000));
+  let node_command = find_node_command()?;
+
+  let temp_dir = tempfile::tempdir()
+    .map_err(|error| format!("Failed to create resolve_public_path temp directory: {error}"))?;
+  let ctx_path = temp_dir.path().join("proxy-context.json");
+  fs::write(&ctx_path, ctx_json)
+    .map_err(|error| format!("Failed to write resolve_public_path context file: {error}"))?;
+
+  let mut child = Command::new(node_command)
+    .arg(worker_path)
+    .arg(file_path)
+    .arg(&ctx_path)
+    .arg(logical_path)
+    .stdout(Stdio::piped())
+    .stderr(Stdio::piped())
+    .spawn()
+    .map_err(|error| format!("Failed to spawn proxy worker: {error}"))?;
+
+  match child
+    .wait_timeout(timeout)
+    .map_err(|error| format!("Failed while waiting for proxy worker: {error}"))?
+  {
+    Some(_) => {}
+    None => {
+      child
+        .kill()
+        .map_err(|error| format!("Failed to terminate timed out proxy worker: {error}"))?;
+      let _ = child.wait();
+      return Err(format!(
+        "proxy.ts execution timed out after {}ms",
+        timeout.as_millis()
+      ));
+    }
+  }
+
+  let stdout = read_pipe_to_string(&mut child.stdout, "proxy worker stdout")?;
+  let stderr = read_pipe_to_string(&mut child.stderr, "proxy worker stderr")?;
+  let _ = child.wait();
+
+  if !stderr.trim().is_empty() {
+    return Err(stderr.trim().to_string());
+  }
+  if stdout.trim().is_empty() {
+    return Err("proxy worker produced no output".into());
+  }
+
+  let aindex_public_dir = build_aindex_public_dir(&ctx.aindex_dir)?;
+  validate_public_path_impl(stdout.trim(), &aindex_public_dir.to_string_lossy())
+}
+
+#[cfg(feature = "napi")]
+mod napi_binding {
+  use super::{resolve_public_path_impl, validate_public_path_impl};
+  use napi::bindgen_prelude::Error;
+  use napi_derive::napi;
+
+  #[napi]
+  pub fn validate_public_path(
+    resolved_path: String,
+    aindex_public_dir: String,
+  ) -> napi::Result<String> {
+    validate_public_path_impl(&resolved_path, &aindex_public_dir).map_err(Error::from_reason)
+  }
+
+  #[napi]
+  pub fn resolve_public_path(
+    file_path: String,
+    ctx_json: String,
+    logical_path: String,
+  ) -> napi::Result<String> {
+    resolve_public_path_impl(&file_path, &ctx_json, &logical_path).map_err(Error::from_reason)
+  }
+}
+
+#[cfg(test)]
+mod tests {
+  use super::validate_public_path_impl;
+  use std::path::PathBuf;
+
+  #[test]
+  fn validate_public_path_rejects_absolute_paths() {
+    let absolute_path = if cfg!(windows) {
+      String::from(r"C:\escape.txt")
+    } else {
+      String::from("/escape.txt")
+    };
+
+    let result = validate_public_path_impl(&absolute_path, "/tmp/workspace/aindex/public");
+    assert!(result.is_err());
+  }
+
+  #[test]
+  fn validate_public_path_rejects_public_root_escape() {
+    let result = validate_public_path_impl("../escape.txt", "/tmp/workspace/aindex/public");
+    assert!(result.is_err());
+  }
+
+  
#[test]
+  fn validate_public_path_rejects_backslash_parent_segments() {
+    let result = validate_public_path_impl(r"..\escape.txt", "/tmp/workspace/aindex/public");
+    assert!(result.is_err());
+  }
+
+  #[test]
+  fn validate_public_path_normalizes_segments() -> Result<(), String> {
+    let validated = validate_public_path_impl(
+      "./____git/./info/../info/exclude",
+      "/tmp/workspace/aindex/public",
+    )?;
+
+    let validated_path = PathBuf::from(validated);
+    assert!(validated_path.ends_with(PathBuf::from("____git").join("info").join("exclude")));
+    Ok(())
+  }
+}
diff --git a/libraries/script-runtime/src/resolve-proxy-worker.ts b/libraries/script-runtime/src/resolve-proxy-worker.ts
new file mode 100644
index 00000000..9fb0f7d3
--- /dev/null
+++ b/libraries/script-runtime/src/resolve-proxy-worker.ts
@@ -0,0 +1,19 @@
+import {readFileSync} from 'node:fs'
+import process from 'node:process'
+import {resolvePublicPathModule} from './runtime-core'
+
+async function main(): Promise<void> {
+  const [, , filePath, ctxJsonPath, logicalPath] = process.argv
+  if (filePath == null || ctxJsonPath == null || logicalPath == null) throw new Error('Usage: resolve-proxy-worker <filePath> <ctxJsonPath> <logicalPath>')
+
+  const ctxJson = readFileSync(ctxJsonPath, 'utf8')
+  const ctx = JSON.parse(ctxJson) as Parameters<typeof resolvePublicPathModule>[1]
+  const result = await resolvePublicPathModule(filePath, ctx, logicalPath)
+  process.stdout.write(`${result}\n`)
+}
+
+main().catch((error: unknown) => {
+  const message = error instanceof Error ?
error.message : String(error)
+  process.stderr.write(`${message}\n`)
+  process.exit(1)
+})
diff --git a/libraries/script-runtime/src/runtime-core.ts b/libraries/script-runtime/src/runtime-core.ts
new file mode 100644
index 00000000..f3f6f59b
--- /dev/null
+++ b/libraries/script-runtime/src/runtime-core.ts
@@ -0,0 +1,104 @@
+import type {Jiti} from 'jiti'
+import type {ProxyContext, ProxyDefinition, ProxyModule, ProxyRouteHandler} from './types'
+
+import * as fs from 'node:fs'
+import * as path from 'node:path'
+
+function isRecord(value: unknown): value is Record<string, unknown> {
+  return typeof value === 'object' && value !== null
+}
+
+function isPlainObject(value: unknown): value is Record<string, unknown> {
+  if (!isRecord(value)) return false
+  const prototype = Object.getPrototypeOf(value) as object | null
+  return prototype === Object.prototype || prototype === null
+}
+
+async function createRuntime(): Promise<Jiti> {
+  const {createJiti} = await import('jiti') as {
+    createJiti: (filename: string, options: {
+      readonly fsCache: boolean
+      readonly moduleCache: boolean
+      readonly interopDefault: false
+    }) => Jiti
+  }
+
+  return createJiti(import.meta.url, {
+    fsCache: false,
+    moduleCache: false,
+    interopDefault: false
+  })
+}
+
+function toProxyModule(rawModule: unknown): ProxyModule {
+  if (!isRecord(rawModule)) throw new Error('proxy.ts must export a module namespace object')
+
+  const defaultExport = rawModule['default']
+  if (defaultExport == null) throw new Error('proxy.ts must export a default value')
+  if (typeof defaultExport !== 'function' && !isPlainObject(defaultExport)) throw new TypeError('proxy.ts default export must be a function or plain object')
+
+  const configExport = rawModule['config']
+  if (configExport != null && !isPlainObject(configExport)) throw new Error('proxy.ts config export must be a plain object')
+
+  const proxyModule: ProxyModule = {
+    default: defaultExport as ProxyModule['default']
+  }
+
+  if (configExport != null) {
+    return {
+      ...proxyModule,
+      config:
configExport as NonNullable<ProxyModule['config']>
+    }
+  }
+
+  return proxyModule
+}
+
+export async function loadProxyModule(filePath: string): Promise<ProxyModule> {
+  const absoluteFilePath = path.resolve(filePath)
+  if (!fs.existsSync(absoluteFilePath)) throw new Error(`proxy.ts not found: ${absoluteFilePath}`)
+
+  const runtime = await createRuntime()
+  const loadedModule = await runtime.import(absoluteFilePath)
+  return toProxyModule(loadedModule)
+}
+
+function matchesCommand(module: ProxyModule, command: ProxyContext['command']): boolean {
+  const commands = module.config?.matcher?.commands
+  if (commands == null || commands.length === 0) return true
+  return commands.includes(command)
+}
+
+function assertNonEmptyPath(value: string, label: string): string {
+  if (value.trim().length === 0) throw new Error(`${label} cannot be empty`)
+  return value
+}
+
+function getRouteHandler(handler: ProxyModule['default']): ProxyRouteHandler | undefined {
+  if (typeof handler === 'function') return handler
+
+  const proxyDefinition: ProxyDefinition = handler
+  if (proxyDefinition.resolvePublicPath == null) return void 0
+  if (typeof proxyDefinition.resolvePublicPath !== 'function') throw new TypeError('proxy.ts default export resolvePublicPath must be a function')
+
+  return proxyDefinition.resolvePublicPath
+}
+
+export async function resolvePublicPathModule(
+  filePath: string,
+  ctx: ProxyContext,
+  logicalPath: string
+): Promise<string> {
+  const targetLogicalPath = assertNonEmptyPath(logicalPath, 'logical public path')
+  const proxyModule = await loadProxyModule(filePath)
+
+  if (!matchesCommand(proxyModule, ctx.command)) return targetLogicalPath
+
+  const routeHandler = getRouteHandler(proxyModule.default)
+  if (routeHandler == null) return targetLogicalPath
+
+  const resolvedPath = await routeHandler(targetLogicalPath, ctx)
+  if (typeof resolvedPath !== 'string') throw new Error('proxy.ts must resolve public paths to a string')
+
+  return assertNonEmptyPath(resolvedPath, 'proxy.ts resolved public path')
+}
diff --git a/libraries/script-runtime/src/types.ts b/libraries/script-runtime/src/types.ts
new file mode 100644
index 00000000..702f1095
--- /dev/null
+++ b/libraries/script-runtime/src/types.ts
@@ -0,0 +1,37 @@
+export type ProxyCommand = 'execute' | 'dry-run' | 'clean' | 'plugins'
+
+export interface ProxyContext {
+  readonly cwd: string
+  readonly workspaceDir: string
+  readonly aindexDir: string
+  readonly command: ProxyCommand
+  readonly platform: NodeJS.Platform
+}
+
+export interface ProxyMatcherConfig {
+  readonly commands?: readonly ProxyCommand[]
+}
+
+export interface ProxyModuleConfig {
+  readonly matcher?: ProxyMatcherConfig
+}
+
+export type ProxyRouteHandler = (
+  logicalPath: string,
+  ctx: ProxyContext
+) => string | Promise<string>
+
+export interface ProxyDefinition {
+  readonly resolvePublicPath?: ProxyRouteHandler
+}
+
+export type ProxyHandler = ProxyDefinition | ProxyRouteHandler
+
+export interface ProxyModule {
+  readonly default: ProxyHandler
+  readonly config?: ProxyModuleConfig
+}
+
+export interface ValidatePublicPathOptions {
+  readonly aindexPublicDir: string
+}
diff --git a/libraries/script-runtime/tsconfig.json b/libraries/script-runtime/tsconfig.json
new file mode 100644
index 00000000..0950f1da
--- /dev/null
+++ b/libraries/script-runtime/tsconfig.json
@@ -0,0 +1,68 @@
+{
+  "$schema": "https://json.schemastore.org/tsconfig",
+  "compilerOptions": {
+    "noUncheckedSideEffectImports": true,
+    "incremental": true,
+    "composite": false,
+    "target": "ESNext",
+    "lib": [
+      "ESNext"
+    ],
+    "moduleDetection": "force",
+    "useDefineForClassFields": true,
+    "baseUrl": ".",
+    "module": "ESNext",
+    "moduleResolution": "Bundler",
+    "paths": {
+      "@/*": [
+        "./src/*"
+      ]
+    },
+    "resolveJsonModule": true,
+    "allowImportingTsExtensions": true,
+    "strict": true,
+    "strictBindCallApply": true,
+    "strictFunctionTypes": true,
+    "strictNullChecks": true,
+    "strictPropertyInitialization": true,
+    "allowUnreachableCode": false,
+    "allowUnusedLabels": false,
+    
"alwaysStrict": true, + "exactOptionalPropertyTypes": true, + "noFallthroughCasesInSwitch": true, + "noImplicitAny": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noImplicitThis": true, + "noPropertyAccessFromIndexSignature": true, + "noUncheckedIndexedAccess": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "useUnknownInCatchVariables": true, + "declaration": true, + "declarationMap": true, + "importHelpers": true, + "newLine": "lf", + "noEmit": true, + "noEmitHelpers": false, + "removeComments": false, + "sourceMap": true, + "stripInternal": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "isolatedModules": true, + "verbatimModuleSyntax": true, + "skipLibCheck": true + }, + "include": [ + "src/**/*", + "env.d.ts", + "eslint.config.ts", + "tsdown.config.ts" + ], + "exclude": [ + "../node_modules", + "dist" + ] +} diff --git a/libraries/script-runtime/tsconfig.lib.json b/libraries/script-runtime/tsconfig.lib.json new file mode 100644 index 00000000..7df70332 --- /dev/null +++ b/libraries/script-runtime/tsconfig.lib.json @@ -0,0 +1,21 @@ +{ + "$schema": "https://json.schemastore.org/tsconfig", + "extends": "./tsconfig.json", + "compilerOptions": { + "composite": true, + "rootDir": "./src", + "noEmit": false, + "outDir": "./dist", + "skipLibCheck": true + }, + "include": [ + "src/**/*", + "env.d.ts" + ], + "exclude": [ + "../node_modules", + "dist", + "**/*.spec.ts", + "**/*.test.ts" + ] +} diff --git a/libraries/script-runtime/tsdown.config.ts b/libraries/script-runtime/tsdown.config.ts new file mode 100644 index 00000000..1504408f --- /dev/null +++ b/libraries/script-runtime/tsdown.config.ts @@ -0,0 +1,18 @@ +import {resolve} from 'node:path' +import {defineConfig} from 'tsdown' + +export default defineConfig([ + { + entry: ['./src/index.ts', './src/resolve-proxy-worker.ts', '!**/*.{spec,test}.*'], + platform: 'node', + sourcemap: false, + unbundle: false, + 
inlineOnly: false, + alias: { + '@': resolve('src') + }, + format: ['esm'], + minify: false, + dts: {sourcemap: false} + } +]) diff --git a/package.json b/package.json index ddcbdacd..1b0fc421 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync", - "version": "2026.10303.11117", + "version": "2026.10314.10606", "description": "Cross-AI-tool prompt synchronisation toolkit (CLI + Tauri desktop GUI) — one ruleset, multi-target adaptation. Monorepo powered by pnpm + Turbo.", "license": "AGPL-3.0-only", "keywords": [ diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ce636706..961dc383 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -224,6 +224,9 @@ importers: '@clack/prompts': specifier: 'catalog:' version: 1.0.1 + '@truenine/script-runtime': + specifier: workspace:* + version: link:../libraries/script-runtime fast-glob: specifier: 'catalog:' version: 3.3.3 @@ -483,6 +486,37 @@ importers: specifier: 'catalog:' version: 4.0.18(@types/node@25.3.3)(jiti@2.6.1)(lightningcss@1.31.1)(tsx@4.21.0)(yaml@2.8.2) + libraries/script-runtime: + dependencies: + jiti: + specifier: 2.6.1 + version: 2.6.1 + devDependencies: + '@napi-rs/cli': + specifier: ^3.5.1 + version: 3.5.1(@emnapi/runtime@1.8.1)(@types/node@25.3.3) + '@truenine/eslint10-config': + specifier: 'catalog:' + version: 2026.10209.11105(57cd6091d29b00e41b508df9848011ef) + '@types/node': + specifier: 'catalog:' + version: 25.3.3 + eslint: + specifier: 'catalog:' + version: 10.0.2(jiti@2.6.1) + npm-run-all2: + specifier: 'catalog:' + version: 8.0.4 + tsdown: + specifier: 'catalog:' + version: 0.21.0-beta.2(synckit@0.11.12)(typescript@5.9.3) + typescript: + specifier: 'catalog:' + version: 5.9.3 + vitest: + specifier: 'catalog:' + version: 4.0.18(@types/node@25.3.3)(jiti@2.6.1)(lightningcss@1.31.1)(tsx@4.21.0)(yaml@2.8.2) + packages: '@antfu/eslint-config@6.7.1': diff --git a/scripts/build-native.ts b/scripts/build-native.ts index 164a471c..97911b5c 100644 --- 
a/scripts/build-native.ts
+++ b/scripts/build-native.ts
@@ -9,6 +9,7 @@ import {fileURLToPath} from 'node:url'
 const NATIVE_MODULES = [
   {name: 'logger', dir: 'libraries/logger'},
   {name: 'md-compiler', dir: 'libraries/md-compiler'},
+  {name: 'script-runtime', dir: 'libraries/script-runtime'},
   {name: 'cli', dir: 'cli'},
 ] as const
diff --git a/scripts/build-quiet.ts b/scripts/build-quiet.ts
new file mode 100644
index 00000000..6b493eb9
--- /dev/null
+++ b/scripts/build-quiet.ts
@@ -0,0 +1,44 @@
+import { spawn } from 'node:child_process'
+
+async function runBuild(): Promise<void> {
+  return new Promise<void>((resolve, reject) => {
+    const child = spawn('tsdown', [], {
+      cwd: process.cwd(),
+      stdio: ['ignore', 'pipe', 'pipe'],
+      shell: true,
+      windowsHide: true,
+    })
+
+    let errorOutput = ''
+
+    child.stdout?.on('data', (data: Buffer) => {
+      // 完全忽略 stdout,tsdown 的日志太详细了
+      void data
+    })
+
+    child.stderr?.on('data', (data: Buffer) => {
+      errorOutput += data.toString()
+    })
+
+    child.on('close', (code) => {
+      // 以进程退出码为准,stderr 可能有警告信息
+      if (code === 0) {
+        console.log('✓ Build successful')
+        resolve()
+      } else {
+        console.error('✗ Build failed')
+        if (errorOutput) {
+          console.error(errorOutput)
+        }
+        reject(new Error(`Build exited with code ${code}`))
+      }
+    })
+
+    child.on('error', (err) => {
+      console.error('✗ Build failed:', err.message)
+      reject(err)
+    })
+  })
+}
+
+runBuild().catch(() => process.exit(1))
diff --git a/scripts/copy-napi.ts b/scripts/copy-napi.ts
index eff80158..f6cdc168 100644
--- a/scripts/copy-napi.ts
+++ b/scripts/copy-napi.ts
@@ -1,5 +1,5 @@
 #!/usr/bin/env tsx
-import {cpSync, existsSync, mkdirSync, readdirSync} from 'node:fs'
+import {cpSync, existsSync, mkdirSync, readdirSync, writeFileSync} from 'node:fs'
 import {dirname, join, resolve} from 'node:path'
 import {fileURLToPath} from 'node:url'
 import process from 'node:process'
@@ -7,6 +7,7 @@ import process from 'node:process'
 const NATIVE_MODULES = [
   {name: 'logger', distDir: 'libraries/logger/dist'},
{name: 'md-compiler', distDir: 'libraries/md-compiler/dist'}, + {name: 'script-runtime', distDir: 'libraries/script-runtime/dist'}, {name: 'cli', distDir: 'cli/dist'}, ] as const @@ -22,12 +23,66 @@ const __dirname = dirname(fileURLToPath(import.meta.url)) const root = resolve(__dirname, '..') const suffix = PLATFORM_MAP[`${process.platform}-${process.arch}`] +const PLATFORM_PACKAGE_SHIM = `'use strict' + +const {readdirSync} = require('node:fs') +const {join} = require('node:path') + +const EXPORT_BINDINGS = [ + ['logger', 'napi-logger.'], + ['mdCompiler', 'napi-md-compiler.'], + ['scriptRuntime', 'napi-script-runtime.'], + ['config', 'napi-memory-sync-cli.'] +] + +const nodeFiles = readdirSync(__dirname).filter(file => file.endsWith('.node')) +const bindings = {} + +for (const [exportName, prefix] of EXPORT_BINDINGS) { + const file = nodeFiles.find(candidate => candidate.startsWith(prefix)) + if (file == null) continue + + Object.defineProperty(bindings, exportName, { + enumerable: true, + get() { + return require(join(__dirname, file)) + } + }) +} + +module.exports = bindings +` + +const PLATFORM_PACKAGE_TYPES = `declare const bindings: { + readonly logger?: unknown + readonly mdCompiler?: unknown + readonly scriptRuntime?: unknown + readonly config?: unknown +} + +export = bindings +` + +function writePlatformPackageShim(targetDir: string): void { + writeFileSync(join(targetDir, 'noop.cjs'), PLATFORM_PACKAGE_SHIM, 'utf8') + writeFileSync(join(targetDir, 'noop.d.ts'), PLATFORM_PACKAGE_TYPES, 'utf8') +} + +const npmPackagesDir = join(root, 'cli', 'npm') +const platformPackageDirs = readdirSync(npmPackagesDir, {withFileTypes: true}) + .filter(entry => entry.isDirectory()) + .map(entry => join(npmPackagesDir, entry.name)) + +for (const targetDir of platformPackageDirs) { + writePlatformPackageShim(targetDir) +} + if (suffix == null) { - console.warn(`[copy-napi] Unsupported platform: ${process.platform}-${process.arch}, skipping`) + console.warn(`[copy-napi] 
Unsupported platform: ${process.platform}-${process.arch}, wrote package shims only`) process.exit(0) } -const targetDir = join(root, 'cli', 'npm', suffix) +const targetDir = join(npmPackagesDir, suffix) mkdirSync(targetDir, {recursive: true}) let copied = 0