diff --git a/.github/actions/npm-auth-preflight/action.yml b/.github/actions/npm-auth-preflight/action.yml
new file mode 100644
index 00000000..65737313
--- /dev/null
+++ b/.github/actions/npm-auth-preflight/action.yml
@@ -0,0 +1,49 @@
+name: NPM Auth Preflight
+description: Validate npm authentication and report package access
+
+inputs:
+  npm-token:
+    description: npm token with publish rights; pass secrets.NPM_TOKEN from the calling workflow (the secrets context is not available inside composite actions)
+    required: false
+    default: ""
+  registry-url:
+    description: npm registry URL
+    required: false
+    default: "https://registry.npmjs.org/"
+  package-dir:
+    description: Directory containing package.json to validate
+    required: true
+  package-name:
+    description: Display name for error messages
+    required: true
+
+runs:
+  using: composite
+  steps:
+    - name: Preflight npm auth
+      shell: bash
+      env:
+        # NOTE: composite actions cannot read the secrets context, so the
+        # token must arrive via the npm-token input, not secrets.NPM_TOKEN.
+        NODE_AUTH_TOKEN: ${{ inputs.npm-token }}
+        REGISTRY_URL: ${{ inputs.registry-url }}
+        PACKAGE_DIR: ${{ inputs.package-dir }}
+        PACKAGE_NAME: ${{ inputs.package-name }}
+      run: |
+        set -euo pipefail
+
+        if [[ -z "${NODE_AUTH_TOKEN:-}" ]]; then
+          echo "::error::NPM_TOKEN is missing. Configure a publish-capable npm token for ${PACKAGE_NAME} and pass it via the npm-token input before rerunning release."
+          exit 1
+        fi
+
+        pushd "$PACKAGE_DIR" >/dev/null
+        # Derive the auth-token key from the configured registry instead of
+        # hard-coding registry.npmjs.org, so non-default registries work too.
+        registry_no_scheme="${REGISTRY_URL#*//}"
+        npm config set "//${registry_no_scheme%/}/:_authToken" "${NODE_AUTH_TOKEN}"
+        npm_user=$(npm whoami --registry "$REGISTRY_URL")
+        popd >/dev/null
+
+        echo "Authenticated to npm as ${npm_user}"
+        echo "Deferring publish permission enforcement for ${PACKAGE_NAME} to the publish step because npm access output is not stable in this workflow."
diff --git a/.github/actions/npm-publish-package/action.yml b/.github/actions/npm-publish-package/action.yml
new file mode 100644
index 00000000..5f933126
--- /dev/null
+++ b/.github/actions/npm-publish-package/action.yml
@@ -0,0 +1,162 @@
+name: NPM Publish Package
+description: Publish a package (or every package directly under package-dir) to npm with retry and verification logic
+
+inputs:
+  npm-token:
+    description: npm token with publish rights; pass secrets.NPM_TOKEN from the calling workflow (the secrets context is not available inside composite actions)
+    required: false
+    default: ""
+  registry-url:
+    description: npm registry URL
+    required: false
+    default: "https://registry.npmjs.org/"
+  package-dir:
+    description: Directory containing package.json to publish, or a directory whose immediate subdirectories each contain a package.json (e.g. cli/npm)
+    required: true
+  verify-attempts:
+    description: Number of verification attempts
+    required: false
+    default: "90"
+  verify-delay:
+    description: Delay between verification attempts in seconds
+    required: false
+    default: "10"
+
+runs:
+  using: composite
+  steps:
+    - name: Publish to npm
+      shell: bash
+      env:
+        # NOTE: composite actions cannot read the secrets context, so the
+        # token must arrive via the npm-token input, not secrets.NPM_TOKEN.
+        NODE_AUTH_TOKEN: ${{ inputs.npm-token }}
+        REGISTRY_URL: ${{ inputs.registry-url }}
+        PACKAGE_DIR: ${{ inputs.package-dir }}
+        VERIFY_ATTEMPTS: ${{ inputs.verify-attempts }}
+        VERIFY_DELAY: ${{ inputs.verify-delay }}
+      run: |
+        set -euo pipefail
+
+        if [[ -z "${NODE_AUTH_TOKEN:-}" ]]; then
+          echo "::error::npm-token input is empty. Pass secrets.NPM_TOKEN to this action before publishing."
+          exit 1
+        fi
+
+        # Query the registry metadata endpoint directly; fallback for when
+        # npm view returns stale or empty output right after publishing.
+        registry_version_exists() {
+          local package_name="$1"
+          local package_version="$2"
+          local encoded_package_name
+          local version_json
+          local published_version
+
+          encoded_package_name=$(node -e 'process.stdout.write(encodeURIComponent(process.argv[1]))' "$package_name")
+          version_json=$(curl --silent --show-error --fail "${REGISTRY_URL%/}/${encoded_package_name}/${package_version}" 2>/dev/null || true)
+
+          if [[ -z "$version_json" ]]; then
+            return 1
+          fi
+
+          published_version=$(jq -r '.version // empty' <<<"$version_json")
+          [[ "$published_version" == "$package_version" ]]
+        }
+
+        version_exists() {
+          local package_name="$1"
+          local package_version="$2"
+          local published_version
+
+          published_version=$(npm view "${package_name}@${package_version}" version --registry "$REGISTRY_URL" 2>/dev/null || true)
+
+          if [[ "$published_version" == "$package_version" ]]; then
+            return 0
+          fi
+
+          registry_version_exists "$package_name" "$package_version"
+        }
+
+        # Poll until the freshly published version becomes visible on npm.
+        verify_version_exists() {
+          local package_name="$1"
+          local package_version="$2"
+          local attempts="$VERIFY_ATTEMPTS"
+          local delay_seconds="$VERIFY_DELAY"
+
+          for attempt in $(seq 1 "$attempts"); do
+            if version_exists "$package_name" "$package_version"; then
+              echo "Verified ${package_name}@${package_version} on npm"
+              return 0
+            fi
+
+            if [[ "$attempt" -eq "$attempts" ]]; then
+              break
+            fi
+
+            echo "Waiting for ${package_name}@${package_version} to appear on npm (${attempt}/${attempts})..."
+            sleep "$delay_seconds"
+          done
+
+          echo "::error::${package_name}@${package_version} is still missing from npm after publish."
+          return 1
+        }
+
+        # Idempotently publish one package directory: skip when the version
+        # already exists, tolerate already-published races, verify otherwise.
+        publish_package() {
+          local package_dir="$1"
+          local package_name
+          local package_version
+          local publish_log
+
+          package_name=$(jq -r '.name' "${package_dir%/}/package.json")
+          package_version=$(jq -r '.version' "${package_dir%/}/package.json")
+
+          if version_exists "$package_name" "$package_version"; then
+            echo "${package_name}@${package_version} already exists on npm, skipping"
+            return 0
+          fi
+
+          publish_log=$(mktemp)
+
+          if (cd "$package_dir" && pnpm publish --access public --no-git-checks) 2>&1 | tee "$publish_log"; then
+            verify_version_exists "$package_name" "$package_version"
+            rm -f "$publish_log"
+            return 0
+          fi
+
+          if grep -Eiq 'cannot publish over the previously published versions|previously published versions' "$publish_log"; then
+            echo "${package_name}@${package_version} was already published according to npm, skipping"
+            rm -f "$publish_log"
+            return 0
+          fi
+
+          if version_exists "$package_name" "$package_version"; then
+            echo "${package_name}@${package_version} already exists on npm after publish attempt, skipping"
+            rm -f "$publish_log"
+            return 0
+          fi
+
+          echo "::error::Failed to publish ${package_name}@${package_version}. Exact version is still missing from npm."
+          rm -f "$publish_log"
+          return 1
+        }
+
+        # Single-package mode when package-dir itself holds a package.json;
+        # otherwise publish every immediate subdirectory that holds one
+        # (matches the previous workflow loop over cli/npm/*/).
+        if [[ -f "${PACKAGE_DIR}/package.json" ]]; then
+          publish_package "$PACKAGE_DIR"
+        else
+          found_any=0
+          for dir in "${PACKAGE_DIR}"/*/; do
+            if [[ -f "${dir}package.json" ]]; then
+              found_any=1
+              publish_package "$dir"
+            fi
+          done
+
+          if [[ "$found_any" -eq 0 ]]; then
+            echo "::error::No package.json found in ${PACKAGE_DIR} or its immediate subdirectories."
+            exit 1
+          fi
+        fi
diff --git a/.github/actions/setup-cross-compile/action.yml b/.github/actions/setup-cross-compile/action.yml
new file mode 100644
index 00000000..299f4fb5
--- /dev/null
+++ b/.github/actions/setup-cross-compile/action.yml
@@ -0,0 +1,14 @@
+name: Setup Cross-Compile Linux ARM64
+description: Install aarch64-linux-gnu cross-compilation toolchain
+
+runs:
+  using: composite
+  steps:
+    - name: Install cross-compilation tools (aarch64-linux)
+      shell: bash
+      run: |
+        # Install the GNU cross toolchain and point Cargo at the matching
+        # linker for the aarch64-unknown-linux-gnu target.
+        sudo apt-get update
+        sudo apt-get install -y gcc-aarch64-linux-gnu g++-aarch64-linux-gnu
+        echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc" >> "$GITHUB_ENV"
diff --git a/.github/workflows/deploy-doc.yml b/.github/workflows/deploy-doc.yml
index 7a8113f1..e6d4f930 100644
--- a/.github/workflows/deploy-doc.yml
+++ b/.github/workflows/deploy-doc.yml
@@ -62,7 +62,7 @@ jobs:
run: pnpm -C doc run lint
- name: Typecheck docs
- run: pnpm -C doc run typecheck
+ run: pnpm -C doc run check:type
- name: Pull Vercel production settings
run: pnpm dlx vercel@latest pull --yes --environment=production --token="$VERCEL_TOKEN"
diff --git a/.github/workflows/pull-request-doc.yml b/.github/workflows/pull-request-doc.yml
index 2ef3e98d..25cb459f 100644
--- a/.github/workflows/pull-request-doc.yml
+++ b/.github/workflows/pull-request-doc.yml
@@ -41,7 +41,7 @@ jobs:
run: pnpm -C doc run lint
- name: Typecheck docs
- run: pnpm -C doc run typecheck
+ run: pnpm -C doc run check:type
- name: Build docs
run: pnpm -C doc run build
diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml
index e92640e1..0577c556 100644
--- a/.github/workflows/pull-request.yml
+++ b/.github/workflows/pull-request.yml
@@ -55,10 +55,10 @@ jobs:
with:
github-token: ${{ github.token }}
- check:
+ build:
if: github.event.pull_request.draft == false
runs-on: ubuntu-24.04
- timeout-minutes: 45
+ timeout-minutes: 20
steps:
- uses: actions/checkout@v6
@@ -74,14 +74,152 @@ jobs:
- name: Build
run: pnpm run build
+ lint:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-24.04
+ timeout-minutes: 20
+ steps:
+ - uses: actions/checkout@v6
+
+ - uses: ./.github/actions/setup-node-pnpm
+
+ - uses: ./.github/actions/setup-rust
+ with:
+ cache-key: pr
+
+ - name: Build native modules
+ run: pnpm run build:native
+
- name: Lint
run: pnpm run lint
+ check-type:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-24.04
+ timeout-minutes: 20
+ steps:
+ - uses: actions/checkout@v6
+
+ - uses: ./.github/actions/setup-node-pnpm
+
+ - uses: ./.github/actions/setup-rust
+ with:
+ cache-key: pr
+
+ - name: Build native modules
+ run: pnpm run build:native
+
- name: Typecheck
- run: pnpm run typecheck
+ run: pnpm run check:type
- - name: Run tests
+ test:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-24.04
+ timeout-minutes: 20
+ steps:
+ - uses: actions/checkout@v6
+
+ - uses: ./.github/actions/setup-node-pnpm
+
+ - uses: ./.github/actions/setup-rust
+ with:
+ cache-key: pr
+
+ - name: Build native modules
+ run: pnpm run build:native
+
+ - name: Build
+ run: pnpm run build
+
+ - name: Run all tests
run: pnpm run test
+ test-sdk:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-24.04
+ timeout-minutes: 15
+ steps:
+ - uses: actions/checkout@v6
+
+ - uses: ./.github/actions/setup-node-pnpm
+
+ - uses: ./.github/actions/setup-rust
+ with:
+ cache-key: pr
+
+ - name: Build native modules
+ run: pnpm run build:native
+
+ - name: SDK tests
+ run: pnpm turbo test --filter=@truenine/memory-sync-sdk
+
+ test-cli:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-24.04
+ timeout-minutes: 15
+ steps:
+ - uses: actions/checkout@v6
+
+ - uses: ./.github/actions/setup-node-pnpm
+
+ - uses: ./.github/actions/setup-rust
+ with:
+ cache-key: pr
+
+ - name: Build native modules
+ run: pnpm run build:native
+
+ - name: CLI tests
+ run: pnpm turbo test --filter=@truenine/memory-sync-cli
+
+ test-mcp:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-24.04
+ timeout-minutes: 15
+ steps:
+ - uses: actions/checkout@v6
+
+ - uses: ./.github/actions/setup-node-pnpm
+
+ - uses: ./.github/actions/setup-rust
+ with:
+ cache-key: pr
+
+ - name: Build native modules
+ run: pnpm run build:native
+
+ - name: MCP tests
+ run: pnpm turbo test --filter=@truenine/memory-sync-mcp
+
+ test-libraries:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-24.04
+ timeout-minutes: 15
+ steps:
+ - uses: actions/checkout@v6
+
+ - uses: ./.github/actions/setup-node-pnpm
+
+ - uses: ./.github/actions/setup-rust
+ with:
+ cache-key: pr
+
+ - name: Build native modules
+ run: pnpm run build:native
+
+ - name: Library tests
+ run: pnpm turbo test --filter=@truenine/logger --filter=@truenine/md-compiler --filter=@truenine/script-runtime
+
+ test-rust:
+ if: github.event.pull_request.draft == false
+ runs-on: ubuntu-24.04
+ timeout-minutes: 20
+ steps:
+ - uses: actions/checkout@v6
+
+ - uses: ./.github/actions/setup-rust
+ with:
+ cache-key: pr
+
- name: Rust tests (excluding GUI)
run: cargo test --workspace --exclude memory-sync-gui --lib --bins --tests
diff --git a/.github/workflows/release-cli.yml b/.github/workflows/release-cli.yml
index fc4716b7..51b4f6f4 100644
--- a/.github/workflows/release-cli.yml
+++ b/.github/workflows/release-cli.yml
@@ -2,8 +2,6 @@ name: Release Packages
env:
NPM_REGISTRY_URL: https://registry.npmjs.org/
- NPM_PUBLISH_VERIFY_ATTEMPTS: "90"
- NPM_PUBLISH_VERIFY_DELAY_SECONDS: "10"
CLI_NATIVE_MODULE_DIRS: |
libraries/logger
libraries/md-compiler
@@ -114,12 +112,9 @@ jobs:
with:
targets: ${{ matrix.target.rust }}
cache-key: napi-${{ matrix.target.rust }}
- - name: Install cross-compilation tools (aarch64-linux)
+ - name: Setup cross-compile
if: matrix.target.cross
- run: |
- sudo apt-get update
- sudo apt-get install -y gcc-aarch64-linux-gnu g++-aarch64-linux-gnu
- echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc" >> $GITHUB_ENV
+ uses: ./.github/actions/setup-cross-compile
- name: Build all napi native modules
shell: bash
run: |
@@ -188,44 +183,6 @@ jobs:
with:
install: "true"
registry-url: https://registry.npmjs.org/
- - name: Preflight npm auth
- shell: bash
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: |
- set -euo pipefail
-
- if [[ -z "${NODE_AUTH_TOKEN:-}" ]]; then
- echo "::error::NPM_TOKEN is missing. Configure a publish-capable npm token for @truenine/* before rerunning release."
- exit 1
- fi
-
- pushd cli >/dev/null
- npm config set //registry.npmjs.org/:_authToken "${NODE_AUTH_TOKEN}"
- npm_user=$(npm whoami --registry "$NPM_REGISTRY_URL")
- access_json=$(npm access list packages @truenine --json 2>/dev/null || true)
- popd >/dev/null
-
- echo "Authenticated to npm as ${npm_user}"
- if [[ -z "${access_json}" || "${access_json}" == "{}" || "${access_json}" == "null" ]]; then
- echo "::error::Authenticated as ${npm_user}, but npm did not report package access for @truenine. Replace NPM_TOKEN with a token that has publish permission for existing @truenine/* packages."
- exit 1
- fi
-
- if ! jq -e . >/dev/null 2>&1 <<<"$access_json"; then
- echo "::warning::npm access list packages returned non-JSON output for ${npm_user}. Falling back to publish-time authorization checks."
- exit 0
- fi
-
- for package_json in cli/npm/*/package.json; do
- package_name=$(jq -r '.name' "$package_json")
- package_access=$(jq -r --arg package_name "$package_name" '.[$package_name] // empty' <<<"$access_json")
-
- if [[ "$package_access" != "read-write" ]]; then
- echo "::error::NPM_TOKEN authenticated as ${npm_user}, but ${package_name} access is '${package_access:-missing}'. Expected read-write."
- exit 1
- fi
- done
- name: Download all platform artifacts
uses: actions/download-artifact@v8
with:
@@ -292,111 +249,11 @@
done <<< "$CLI_NATIVE_BINDING_PREFIXES"
done
- name: Publish CLI platform sub-packages
- shell: bash
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: |
- set -euo pipefail
-
- registry_version_exists() {
- local package_name="$1"
- local package_version="$2"
- local encoded_package_name
- local version_json
- local published_version
-
- encoded_package_name=$(node -e 'process.stdout.write(encodeURIComponent(process.argv[1]))' "$package_name")
- version_json=$(curl --silent --show-error --fail "${NPM_REGISTRY_URL%/}/${encoded_package_name}/${package_version}" 2>/dev/null || true)
-
- if [[ -z "$version_json" ]]; then
- return 1
- fi
-
- published_version=$(jq -r '.version // empty' <<<"$version_json")
- [[ "$published_version" == "$package_version" ]]
- }
-
- version_exists() {
- local package_name="$1"
- local package_version="$2"
- local published_version
-
- published_version=$(npm view "${package_name}@${package_version}" version --registry "$NPM_REGISTRY_URL" 2>/dev/null || true)
-
- if [[ "$published_version" == "$package_version" ]]; then
- return 0
- fi
-
- registry_version_exists "$package_name" "$package_version"
- }
-
- verify_version_exists() {
- local package_name="$1"
- local package_version="$2"
- local attempts="${NPM_PUBLISH_VERIFY_ATTEMPTS}"
- local delay_seconds="${NPM_PUBLISH_VERIFY_DELAY_SECONDS}"
-
- for attempt in $(seq 1 "$attempts"); do
- if version_exists "$package_name" "$package_version"; then
- echo "Verified ${package_name}@${package_version} on npm"
- return 0
- fi
-
- if [[ "$attempt" -eq "$attempts" ]]; then
- break
- fi
-
- echo "Waiting for ${package_name}@${package_version} to appear on npm (${attempt}/${attempts})..."
- sleep "$delay_seconds"
- done
-
- echo "::error::${package_name}@${package_version} is still missing from npm after publish."
- return 1
- }
-
- publish_package() {
- local package_dir="$1"
- local package_name
- local package_version
- local publish_log
-
- package_name=$(jq -r '.name' "${package_dir}package.json")
- package_version=$(jq -r '.version' "${package_dir}package.json")
-
- if version_exists "$package_name" "$package_version"; then
- echo "${package_name}@${package_version} already exists on npm, skipping"
- return 0
- fi
-
- publish_log=$(mktemp)
- if (cd "$package_dir" && pnpm publish --access public --no-git-checks) 2>&1 | tee "$publish_log"; then
- verify_version_exists "$package_name" "$package_version"
- rm -f "$publish_log"
- return 0
- fi
-
- if grep -Eiq 'cannot publish over the previously published versions|previously published versions' "$publish_log"; then
- echo "${package_name}@${package_version} was already published according to npm, skipping"
- rm -f "$publish_log"
- return 0
- fi
-
- if version_exists "$package_name" "$package_version"; then
- echo "${package_name}@${package_version} already exists on npm after publish attempt, skipping"
- rm -f "$publish_log"
- return 0
- fi
-
- echo "::error::Failed to publish ${package_name}@${package_version}. Exact version is still missing from npm."
- rm -f "$publish_log"
- return 1
- }
-
- for dir in cli/npm/*/; do
- if [ -f "${dir}package.json" ]; then
- publish_package "$dir"
- fi
- done
+ uses: ./.github/actions/npm-publish-package
+ with:
+ npm-token: ${{ secrets.NPM_TOKEN }}
+ registry-url: ${{ env.NPM_REGISTRY_URL }}
+ package-dir: cli/npm
# 4. 架构包就绪后,发布主包到 npm
publish-cli:
@@ -409,115 +265,19 @@
- uses: ./.github/actions/setup-node-pnpm
with:
registry-url: https://registry.npmjs.org/
- - name: Preflight npm auth
- shell: bash
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: |
- set -euo pipefail
-
- if [[ -z "${NODE_AUTH_TOKEN:-}" ]]; then
- echo "::error::NPM_TOKEN is missing. Configure a publish-capable npm token for @truenine/memory-sync-cli before rerunning release."
- exit 1
- fi
-
- pushd cli >/dev/null
- npm config set //registry.npmjs.org/:_authToken "${NODE_AUTH_TOKEN}"
- npm_user=$(npm whoami --registry "$NPM_REGISTRY_URL")
- package_name=$(jq -r '.name' package.json)
- popd >/dev/null
-
- echo "Authenticated to npm as ${npm_user}"
- echo "Deferring publish permission enforcement for ${package_name} to the publish step because npm access output is not stable in this workflow."
+ - uses: ./.github/actions/npm-auth-preflight
+ with:
+ npm-token: ${{ secrets.NPM_TOKEN }}
+ registry-url: ${{ env.NPM_REGISTRY_URL }}
+ package-dir: cli
+ package-name: "@truenine/memory-sync-cli"
- name: Build
run: pnpm -F @truenine/memory-sync-cli run build
- - name: Publish to npm
- shell: bash
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: |
- set -euo pipefail
-
- package_name=$(jq -r '.name' cli/package.json)
- package_version=$(jq -r '.version' cli/package.json)
-
- registry_version_exists() {
- local encoded_package_name
- local version_json
- local published_version
-
- encoded_package_name=$(node -e 'process.stdout.write(encodeURIComponent(process.argv[1]))' "$package_name")
- version_json=$(curl --silent --show-error --fail "${NPM_REGISTRY_URL%/}/${encoded_package_name}/${package_version}" 2>/dev/null || true)
-
- if [[ -z "$version_json" ]]; then
- return 1
- fi
-
- published_version=$(jq -r '.version // empty' <<<"$version_json")
- [[ "$published_version" == "$package_version" ]]
- }
-
- version_exists() {
- local published_version
- published_version=$(npm view "${package_name}@${package_version}" version --registry "$NPM_REGISTRY_URL" 2>/dev/null || true)
-
- if [[ "$published_version" == "$package_version" ]]; then
- return 0
- fi
-
- registry_version_exists
- }
-
- verify_version_exists() {
- local attempts="${NPM_PUBLISH_VERIFY_ATTEMPTS}"
- local delay_seconds="${NPM_PUBLISH_VERIFY_DELAY_SECONDS}"
-
- for attempt in $(seq 1 "$attempts"); do
- if version_exists; then
- echo "Verified ${package_name}@${package_version} on npm"
- return 0
- fi
-
- if [[ "$attempt" -eq "$attempts" ]]; then
- break
- fi
-
- echo "Waiting for ${package_name}@${package_version} to appear on npm (${attempt}/${attempts})..."
- sleep "$delay_seconds"
- done
-
- echo "::error::${package_name}@${package_version} is still missing from npm after publish."
- return 1
- }
-
- if version_exists; then
- echo "${package_name}@${package_version} already exists on npm, skipping"
- exit 0
- fi
-
- publish_log=$(mktemp)
-
- if (cd cli && pnpm publish --access public --no-git-checks) 2>&1 | tee "$publish_log"; then
- verify_version_exists
- rm -f "$publish_log"
- exit 0
- fi
-
- if grep -Eiq 'cannot publish over the previously published versions|previously published versions' "$publish_log"; then
- echo "${package_name}@${package_version} was already published according to npm, skipping"
- rm -f "$publish_log"
- exit 0
- fi
-
- if version_exists; then
- echo "${package_name}@${package_version} already exists on npm after publish attempt, skipping"
- rm -f "$publish_log"
- exit 0
- fi
-
- echo "::error::Failed to publish ${package_name}@${package_version}. Exact version is still missing from npm."
- rm -f "$publish_log"
- exit 1
+ - uses: ./.github/actions/npm-publish-package
+ with:
+ npm-token: ${{ secrets.NPM_TOKEN }}
+ registry-url: ${{ env.NPM_REGISTRY_URL }}
+ package-dir: cli
# 4.5. CLI 可用后,发布 MCP 包到 npm
publish-mcp:
@@ -532,115 +290,19 @@
- uses: ./.github/actions/setup-node-pnpm
with:
registry-url: https://registry.npmjs.org/
- - name: Preflight npm auth
- shell: bash
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: |
- set -euo pipefail
-
- if [[ -z "${NODE_AUTH_TOKEN:-}" ]]; then
- echo "::error::NPM_TOKEN is missing. Configure a publish-capable npm token for @truenine/memory-sync-mcp before rerunning release."
- exit 1
- fi
-
- pushd mcp >/dev/null
- npm config set //registry.npmjs.org/:_authToken "${NODE_AUTH_TOKEN}"
- npm_user=$(npm whoami --registry "$NPM_REGISTRY_URL")
- package_name=$(jq -r '.name' package.json)
- popd >/dev/null
-
- echo "Authenticated to npm as ${npm_user}"
- echo "Deferring publish permission enforcement for ${package_name} to the publish step because npm access output is not stable in this workflow."
+ - uses: ./.github/actions/npm-auth-preflight
+ with:
+ npm-token: ${{ secrets.NPM_TOKEN }}
+ registry-url: ${{ env.NPM_REGISTRY_URL }}
+ package-dir: mcp
+ package-name: "@truenine/memory-sync-mcp"
- name: Build
run: pnpm exec turbo run build --filter=@truenine/memory-sync-mcp
- - name: Publish to npm
- shell: bash
- env:
- NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: |
- set -euo pipefail
-
- package_name=$(jq -r '.name' mcp/package.json)
- package_version=$(jq -r '.version' mcp/package.json)
-
- registry_version_exists() {
- local encoded_package_name
- local version_json
- local published_version
-
- encoded_package_name=$(node -e 'process.stdout.write(encodeURIComponent(process.argv[1]))' "$package_name")
- version_json=$(curl --silent --show-error --fail "${NPM_REGISTRY_URL%/}/${encoded_package_name}/${package_version}" 2>/dev/null || true)
-
- if [[ -z "$version_json" ]]; then
- return 1
- fi
-
- published_version=$(jq -r '.version // empty' <<<"$version_json")
- [[ "$published_version" == "$package_version" ]]
- }
-
- version_exists() {
- local published_version
- published_version=$(npm view "${package_name}@${package_version}" version --registry "$NPM_REGISTRY_URL" 2>/dev/null || true)
-
- if [[ "$published_version" == "$package_version" ]]; then
- return 0
- fi
-
- registry_version_exists
- }
-
- verify_version_exists() {
- local attempts="${NPM_PUBLISH_VERIFY_ATTEMPTS}"
- local delay_seconds="${NPM_PUBLISH_VERIFY_DELAY_SECONDS}"
-
- for attempt in $(seq 1 "$attempts"); do
- if version_exists; then
- echo "Verified ${package_name}@${package_version} on npm"
- return 0
- fi
-
- if [[ "$attempt" -eq "$attempts" ]]; then
- break
- fi
-
- echo "Waiting for ${package_name}@${package_version} to appear on npm (${attempt}/${attempts})..."
- sleep "$delay_seconds"
- done
-
- echo "::error::${package_name}@${package_version} is still missing from npm after publish."
- return 1
- }
-
- if version_exists; then
- echo "${package_name}@${package_version} already exists on npm, skipping"
- exit 0
- fi
-
- publish_log=$(mktemp)
-
- if (cd mcp && pnpm publish --access public --no-git-checks) 2>&1 | tee "$publish_log"; then
- verify_version_exists
- rm -f "$publish_log"
- exit 0
- fi
-
- if grep -Eiq 'cannot publish over the previously published versions|previously published versions' "$publish_log"; then
- echo "${package_name}@${package_version} was already published according to npm, skipping"
- rm -f "$publish_log"
- exit 0
- fi
-
- if version_exists; then
- echo "${package_name}@${package_version} already exists on npm after publish attempt, skipping"
- rm -f "$publish_log"
- exit 0
- fi
-
- echo "::error::Failed to publish ${package_name}@${package_version}. Exact version is still missing from npm."
- rm -f "$publish_log"
- exit 1
+ - uses: ./.github/actions/npm-publish-package
+ with:
+ npm-token: ${{ secrets.NPM_TOKEN }}
+ registry-url: ${{ env.NPM_REGISTRY_URL }}
+ package-dir: mcp
# 5. 构建 CLI 独立二进制(仅 artifact,不发 Release)
build-binary:
@@ -687,12 +347,9 @@ jobs:
with:
targets: ${{ matrix.target }}
cache-key: cli-${{ matrix.target }}
- - name: Install cross-compilation tools (aarch64-linux)
+ - name: Setup cross-compile
if: matrix.cross
- run: |
- sudo apt-get update
- sudo apt-get install -y gcc-aarch64-linux-gnu g++-aarch64-linux-gnu
- echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc" >> $GITHUB_ENV
+ uses: ./.github/actions/setup-cross-compile
- name: Build tnmsc binary (release, with embedded runtime)
run: cargo build --release --target ${{ matrix.target }} -p tnmsc-cli-shell --features tnmsc/embedded-runtime
- name: Run tests (native only)
diff --git a/cli/package.json b/cli/package.json
index 8f4dd14b..c903bdf1 100644
--- a/cli/package.json
+++ b/cli/package.json
@@ -55,16 +55,16 @@
"ensure:sdk-build": "tsx scripts/ensure-sdk-build.ts",
"sync:sdk-assets": "tsx scripts/sync-sdk-dist.ts",
"check": "run-s ensure:sdk-build check:run",
- "check:run": "run-p lint:run typecheck:run",
+ "check:run": "run-p lint:run check:type:run",
"lint": "run-s ensure:sdk-build lint:run",
- "lint:run": "eslint --cache --cache-location node_modules/.cache/.eslintcache .",
+ "lint:run": "eslint --cache --cache-location .eslintcache .",
"prepublishOnly": "run-s build check",
"test": "run-s ensure:sdk-build test:run",
"test:run": "vitest run",
- "lintfix": "run-s ensure:sdk-build lintfix:run",
- "lintfix:run": "eslint --fix --cache --cache-location node_modules/.cache/.eslintcache .",
- "typecheck": "run-s ensure:sdk-build typecheck:run",
- "typecheck:run": "tsc --noEmit -p tsconfig.lib.json"
+ "lint:fix": "run-s ensure:sdk-build lint:fix:run",
+ "lint:fix:run": "eslint --fix --cache --cache-location .eslintcache .",
+ "typecheck": "run-s ensure:sdk-build check:type:run",
+ "check:type:run": "tsc --noEmit -p tsconfig.lib.json"
},
"dependencies": {},
"optionalDependencies": {
diff --git a/doc/content/technical-details/documentation-components.mdx b/doc/content/technical-details/documentation-components.mdx
index 5a3ddd55..1ec26d1b 100644
--- a/doc/content/technical-details/documentation-components.mdx
+++ b/doc/content/technical-details/documentation-components.mdx
@@ -16,29 +16,29 @@ status: stable
description="Useful when you want one table that shows what the CLI, MCP, GUI, and docs site each own."
items={[
{
- tool: 'CLI',
- summary: 'User entrypoint responsible for sync and cleanup execution.',
- status: 'stable',
- capabilities: ['Run the default sync pipeline', 'Support dry-run / clean / config', 'Expose the npm and crate entrypoint'],
- surfaces: ['tnmsc', 'pnpm -C cli exec node dist/index.mjs --help'],
- notes: 'The real execution entrypoint for users is still the CLI, not the docs site or the GUI.'
+ tool: "CLI",
+ summary: "User entrypoint responsible for sync and cleanup execution.",
+ status: "stable",
+ capabilities: ["Run the default sync pipeline", "Support dry-run / clean / config", "Expose the npm and crate entrypoint"],
+ surfaces: ["tnmsc", "pnpm -C cli exec node dist/index.mjs --help"],
+ notes: "The real execution entrypoint for users is still the CLI, not the docs site or the GUI.",
},
{
- tool: 'MCP Server',
- summary: 'Exposes prompt-management capability externally.',
- status: {label: 'Stable API', tone: 'stable'},
- capabilities: ['List prompts', 'Read source / dist', 'Update zh/en source', 'Write translation artifacts'],
- surfaces: ['memory-sync-mcp stdio server', 'MCP client integration'],
- notes: 'It reuses the CLI-side prompt service and should not invent a separate schema.'
+ tool: "MCP Server",
+ summary: "Exposes prompt-management capability externally.",
+ status: { label: "Stable API", tone: "stable" },
+ capabilities: ["List prompts", "Read source / dist", "Update zh/en source", "Write translation artifacts"],
+ surfaces: ["memory-sync-mcp stdio server", "MCP client integration"],
+ notes: "It reuses the CLI-side prompt service and should not invent a separate schema.",
},
{
- tool: 'GUI',
- summary: 'Desktop invocation and presentation layer.',
- status: 'beta',
- capabilities: ['Call the tnmsc crate', 'Host the desktop workflow', 'Show project status and action pages'],
- surfaces: ['Tauri desktop shell', 'React frontend'],
- notes: 'The GUI owns invocation and presentation and should not drive domain design back from the frontend.'
- }
+ tool: "GUI",
+ summary: "Desktop invocation and presentation layer.",
+ status: "beta",
+ capabilities: ["Call the tnmsc crate", "Host the desktop workflow", "Show project status and action pages"],
+ surfaces: ["Tauri desktop shell", "React frontend"],
+ notes: "The GUI owns invocation and presentation and should not drive domain design back from the frontend.",
+ },
]}
/>
@@ -49,29 +49,29 @@ status: stable
description="Useful when you need to describe platform support levels and the differences between them."
items={[
{
- system: 'Windows',
- support: 'full',
- coverage: ['CLI workflows', 'Docs-site development', 'Tauri desktop development'],
- notes: 'This is the most important platform for the current user base, so command examples should prioritize it.'
+ system: "Windows",
+ support: "full",
+ coverage: ["CLI workflows", "Docs-site development", "Tauri desktop development"],
+ notes: "This is the most important platform for the current user base, so command examples should prioritize it.",
},
{
- system: 'macOS',
- support: 'partial',
- coverage: ['CLI workflows', 'Docs-site development'],
- notes: 'Supported in principle, but desktop packaging and signing need separate verification.'
+ system: "macOS",
+ support: "partial",
+ coverage: ["CLI workflows", "Docs-site development"],
+ notes: "Supported in principle, but desktop packaging and signing need separate verification.",
},
{
- system: 'Linux',
- support: 'partial',
- coverage: ['CLI workflows', 'Docs-site development', 'Some desktop scenarios'],
- notes: 'A good fit for CI and dev machines, but desktop distribution depends on distro differences.'
+ system: "Linux",
+ support: "partial",
+ coverage: ["CLI workflows", "Docs-site development", "Some desktop scenarios"],
+ notes: "A good fit for CI and dev machines, but desktop distribution depends on distro differences.",
},
{
- system: 'WSL',
- support: {label: 'Recommended', tone: 'stable'},
- coverage: ['Node / pnpm / Rust development', 'Cross-filesystem collaboration on Windows'],
- notes: 'The repository is already a strong fit for WSL-based maintenance.'
- }
+ system: "WSL",
+ support: { label: "Recommended", tone: "stable" },
+ coverage: ["Node / pnpm / Rust development", "Cross-filesystem collaboration on Windows"],
+ notes: "The repository is already a strong fit for WSL-based maintenance.",
+ },
]}
/>
@@ -82,26 +82,34 @@ status: stable
description="Useful on home and install pages when you want fast platform positioning instead of a cold support table."
items={[
{
- name: 'Windows',
- family: 'Desktop',
- support: 'full',
- description: 'Primary desktop environment for users, and a good place to explain WSL, PowerShell, and the Tauri GUI together.',
- highlights: ['Prioritize installation and upgrade guidance', 'A good place to show both GUI and CLI entrypoints', 'Screenshots and command examples are most valuable here']
+ name: "Windows",
+ family: "Desktop",
+ support: "full",
+ description: "Primary desktop environment for users, and a good place to explain WSL, PowerShell, and the Tauri GUI together.",
+ highlights: [
+ "Prioritize installation and upgrade guidance",
+ "A good place to show both GUI and CLI entrypoints",
+ "Screenshots and command examples are most valuable here",
+ ],
},
{
- name: 'macOS',
- family: 'Desktop',
- support: 'partial',
- description: 'A common developer environment and a good place to document Homebrew, codesign, and desktop integration details.',
- highlights: ['Emphasize developer experience', 'Handle signing and distribution separately', 'Avoid Windows-specific path assumptions in command examples']
+ name: "macOS",
+ family: "Desktop",
+ support: "partial",
+ description: "A common developer environment and a good place to document Homebrew, codesign, and desktop integration details.",
+ highlights: [
+ "Emphasize developer experience",
+ "Handle signing and distribution separately",
+ "Avoid Windows-specific path assumptions in command examples",
+ ],
},
{
- name: 'Linux',
- family: 'Desktop / CI',
- support: 'partial',
- description: 'Better suited for automation, CI, and local development guidance.',
- highlights: ['A good place for CI and build commands', 'Call out distro differences clearly', 'Explain desktop distribution per package format']
- }
+ name: "Linux",
+ family: "Desktop / CI",
+ support: "partial",
+ description: "Better suited for automation, CI, and local development guidance.",
+ highlights: ["A good place for CI and build commands", "Call out distro differences clearly", "Explain desktop distribution per package format"],
+ },
]}
/>
@@ -112,33 +120,33 @@ status: stable
description="You can append rows without changing component structure. It works well for pnpm, cargo, Tauri, and docs-site commands."
items={[
{
- stack: 'pnpm workspace',
- task: 'Install dependencies',
- command: 'pnpm install',
- runtime: 'Node.js 22+',
- notes: 'All workspaces start from the repository root.'
+ stack: "pnpm workspace",
+ task: "Install dependencies",
+ command: "pnpm install",
+ runtime: "Node.js 22+",
+ notes: "All workspaces start from the repository root.",
},
{
- stack: 'docs / Next.js',
- task: 'Run the docs site locally',
- command: 'pnpm -C doc dev',
- runtime: 'Node.js 22+',
- notes: 'Useful when previewing MDX content and components together.'
+ stack: "docs / Next.js",
+ task: "Run the docs site locally",
+ command: "pnpm -C doc dev",
+ runtime: "Node.js 22+",
+ notes: "Useful when previewing MDX content and components together.",
},
{
- stack: 'docs / Next.js',
- task: 'Type-check',
- command: ['pnpm -C doc run validate:content', 'pnpm -C doc run typecheck'],
- runtime: 'Node.js 22+',
- notes: 'Run this after changing MDX, frontmatter, or docs components.'
+ stack: "docs / Next.js",
+ task: "Type-check",
+ command: ["pnpm -C doc run validate:content", "pnpm -C doc run check:type"],
+ runtime: "Node.js 22+",
+ notes: "Run this after changing MDX, frontmatter, or docs components.",
},
{
- stack: 'Rust workspace',
- task: 'Run tests',
- command: 'cargo test --workspace',
- runtime: 'Rust 1.88+',
- notes: 'Useful for checking whether changes affected the Rust-first side.'
- }
+ stack: "Rust workspace",
+ task: "Run tests",
+ command: "cargo test --workspace",
+ runtime: "Rust 1.88+",
+ notes: "Useful for checking whether changes affected the Rust-first side.",
+ },
]}
/>
@@ -148,9 +156,9 @@ status: stable
diff --git a/doc/package.json b/doc/package.json
index da0bcf7b..56b635b1 100644
--- a/doc/package.json
+++ b/doc/package.json
@@ -10,11 +10,11 @@
"dev": "next dev",
"build": "pnpm run validate:content && next build --webpack",
"postbuild": "pagefind --site .next/server/app --output-path public/_pagefind",
- "check": "run-p lint typecheck",
+ "check": "run-p lint check:type",
"validate:content": "tsx scripts/validate-content.ts",
- "typecheck": "next typegen && tsc --project tsconfig.typecheck.json --noEmit --incremental false",
+ "check:type": "next typegen && tsc --project tsconfig.typecheck.json --noEmit --incremental false",
"start": "next start",
- "lint": "pnpm run validate:content && eslint ."
+ "lint": "pnpm run validate:content && eslint --cache --cache-location .eslintcache ."
},
"dependencies": {
"@theguild/remark-mermaid": "catalog:",
diff --git a/gui/package.json b/gui/package.json
index bfe8efef..d7f1fdfc 100644
--- a/gui/package.json
+++ b/gui/package.json
@@ -17,7 +17,7 @@
"tauri:dev": "tauri dev",
"tauri:build": "tauri build",
"generate:routes": "tsx scripts/generate-routes.ts",
- "typecheck": "tsc --noEmit",
+ "check:type": "tsc --noEmit",
"test:ui": "vitest --run",
"test:tauri": "cargo test --manifest-path src-tauri/Cargo.toml --lib --bins --tests",
"test": "pnpm run test:ui && pnpm tsx ./scripts/run-tauri-tests.ts"
diff --git a/libraries/logger/package.json b/libraries/logger/package.json
index 3b1881cc..6deaf53b 100644
--- a/libraries/logger/package.json
+++ b/libraries/logger/package.json
@@ -33,14 +33,14 @@
"build:native": "napi build --platform --release --output-dir dist -- --features napi",
"build:native:debug": "napi build --platform --output-dir dist -- --features napi",
"build:ts": "tsx ../../scripts/build-quiet.ts",
- "check": "run-p typecheck lint",
- "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache .",
- "lintfix": "eslint --fix --cache --cache-location node_modules/.cache/.eslintcache .",
+ "check": "run-p check:type lint",
+ "lint": "eslint --cache --cache-location .eslintcache .",
+ "lint:fix": "eslint --fix --cache --cache-location .eslintcache .",
"prepublishOnly": "run-s build",
"test": "run-s test:rust test:ts",
"test:rust": "tsx ../../scripts/cargo-test.ts",
"test:ts": "vitest run --passWithNoTests",
- "typecheck": "tsc --noEmit -p tsconfig.lib.json"
+ "check:type": "tsc --noEmit -p tsconfig.lib.json"
},
"devDependencies": {
"@napi-rs/cli": "catalog:",
diff --git a/libraries/logger/src/lib.rs b/libraries/logger/src/lib.rs
index 2d3da3f3..341704e0 100644
--- a/libraries/logger/src/lib.rs
+++ b/libraries/logger/src/lib.rs
@@ -408,15 +408,15 @@ fn append_section(
Some(number) => {
let mut iter = entries.iter();
if let Some(first) = iter.next() {
- lines.push(format!("{number}. {first}"));
+ lines.push(format!(" {number}. {first}"));
}
for entry in iter {
- lines.push(format!(" {entry}"));
+ lines.push(format!(" {entry}"));
}
}
None => {
for entry in entries {
- lines.push(format!("- {entry}"));
+ lines.push(format!(" - {entry}"));
}
}
}
@@ -629,10 +629,10 @@ fn build_copy_text(record: &LoggerDiagnosticRecord) -> Vec<String> {
for (index, fix) in possible_fixes.iter().enumerate() {
let mut iter = fix.iter();
if let Some(first) = iter.next() {
- lines.push(format!("{}. {}", index + 1, first));
+ lines.push(format!(" {}. {}", index + 1, first));
}
for entry in iter {
- lines.push(format!(" {entry}"));
+ lines.push(format!(" {entry}"));
}
}
}
@@ -644,7 +644,11 @@ fn build_copy_text(record: &LoggerDiagnosticRecord) -> Vec<String> {
lines.push(String::new());
}
lines.push("**Context**".to_string());
- lines.extend(value_to_markdown_lines(&Value::Object(details.clone())));
+ let mut detail_lines = value_to_markdown_lines(&Value::Object(details.clone()));
+ for line in &mut detail_lines {
+ line.insert_str(0, " ");
+ }
+ lines.extend(detail_lines);
}
lines
@@ -1260,9 +1264,9 @@ mod tests {
let rendered = render_diagnostic_output(LogLevel::Warn, &record);
assert!(rendered.contains("**WARN** `logger-test` [TEST_WARN] Pretty output"));
assert!(rendered.contains("**Root Cause**"));
- assert!(rendered.contains("- The warning must stay readable."));
+ assert!(rendered.contains(" - The warning must stay readable."));
assert!(rendered.contains("**Context**"));
- assert!(rendered.contains("- path: C:\\runtime\\plugin"));
+ assert!(rendered.contains(" - path: C:\\runtime\\plugin"));
}
#[test]
@@ -1289,9 +1293,9 @@ mod tests {
assert!(
record
.copy_text
- .contains(&"- path: C:\\runtime\\plugin".to_string())
+ .contains(&" - path: C:\\runtime\\plugin".to_string())
);
- assert!(record.copy_text.contains(&"- phase: cleanup".to_string()));
+ assert!(record.copy_text.contains(&" - phase: cleanup".to_string()));
assert!(!record.copy_text.iter().any(|line| line == "{"));
}
diff --git a/libraries/md-compiler/package.json b/libraries/md-compiler/package.json
index 7be4dac3..4d1f4956 100644
--- a/libraries/md-compiler/package.json
+++ b/libraries/md-compiler/package.json
@@ -45,14 +45,14 @@
"build:native": "napi build --platform --release --output-dir dist -- --features napi",
"build:native:debug": "napi build --platform --output-dir dist -- --features napi",
"build:ts": "tsdown",
- "check": "run-p typecheck lint",
- "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache .",
- "lintfix": "eslint --fix --cache --cache-location node_modules/.cache/.eslintcache .",
+ "check": "run-p check:type lint",
+ "lint": "eslint --cache --cache-location .eslintcache .",
+ "lint:fix": "eslint --fix --cache --cache-location .eslintcache .",
"prepublishOnly": "run-s build",
"test": "run-s test:rust test:ts",
"test:rust": "tsx ../../scripts/cargo-test.ts",
"test:ts": "vitest run --passWithNoTests",
- "typecheck": "tsc --noEmit -p tsconfig.lib.json"
+ "check:type": "tsc --noEmit -p tsconfig.lib.json"
},
"devDependencies": {
"@napi-rs/cli": "catalog:",
diff --git a/libraries/script-runtime/package.json b/libraries/script-runtime/package.json
index d3746af1..ec3f49b1 100644
--- a/libraries/script-runtime/package.json
+++ b/libraries/script-runtime/package.json
@@ -33,14 +33,14 @@
"build:native": "napi build --platform --release --output-dir dist -- --features napi",
"build:native:debug": "napi build --platform --output-dir dist -- --features napi",
"build:ts": "tsdown",
- "check": "run-p typecheck lint",
- "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache .",
- "lintfix": "eslint --fix --cache --cache-location node_modules/.cache/.eslintcache .",
+ "check": "run-p check:type lint",
+ "lint": "eslint --cache --cache-location .eslintcache .",
+ "lint:fix": "eslint --fix --cache --cache-location .eslintcache .",
"prepublishOnly": "run-s build",
"test": "run-s test:rust test:ts",
"test:rust": "tsx ../../scripts/cargo-test.ts",
"test:ts": "vitest run --passWithNoTests",
- "typecheck": "tsc --noEmit -p tsconfig.lib.json"
+ "check:type": "tsc --noEmit -p tsconfig.lib.json"
},
"devDependencies": {
"@napi-rs/cli": "catalog:",
diff --git a/mcp/package.json b/mcp/package.json
index 8e327873..da03c1d1 100644
--- a/mcp/package.json
+++ b/mcp/package.json
@@ -33,12 +33,12 @@
},
"scripts": {
"build": "tsdown",
- "check": "run-p typecheck lint",
- "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache .",
- "lintfix": "eslint --fix --cache --cache-location node_modules/.cache/.eslintcache .",
+ "check": "run-p check:type lint",
+ "lint": "eslint --cache --cache-location .eslintcache .",
+ "lint:fix": "eslint --fix --cache --cache-location .eslintcache .",
"prepublishOnly": "run-s build",
"test": "vitest run",
- "typecheck": "tsc --noEmit -p tsconfig.lib.json"
+ "check:type": "tsc --noEmit -p tsconfig.lib.json"
},
"dependencies": {
"@modelcontextprotocol/sdk": "catalog:",
diff --git a/package.json b/package.json
index f74bc95c..8ac3e906 100644
--- a/package.json
+++ b/package.json
@@ -32,8 +32,8 @@
"build": "turbo build",
"test": "turbo test --filter=!@truenine/memory-sync-gui",
"lint": "turbo lint",
- "lintfix": "turbo lintfix",
- "typecheck": "turbo typecheck",
+ "lint:fix": "turbo lint:fix",
+ "check:type": "turbo check:type",
"dev:doc": "pnpm -C doc dev",
"build:doc": "pnpm -C doc build",
"sync-versions": "tsx .githooks/sync-versions.ts",
diff --git a/scripts/build-native.ts b/scripts/build-native.ts
index dab0fa69..92e69eb4 100644
--- a/scripts/build-native.ts
+++ b/scripts/build-native.ts
@@ -1,6 +1,6 @@
#!/usr/bin/env tsx
import {execFileSync, execSync} from 'node:child_process'
-import {existsSync} from 'node:fs'
+import {existsSync, readFileSync} from 'node:fs'
import {homedir} from 'node:os'
import {dirname, join, resolve} from 'node:path'
import process from 'node:process'
@@ -16,6 +16,10 @@ const NATIVE_MODULES = [
const __dirname = import.meta.dirname ?? dirname(fileURLToPath(import.meta.url))
const root = resolve(__dirname, '..')
+interface PackageManifestWithScripts {
+ readonly scripts?: Readonly<Record<string, string>>
+}
+
function findCargo(): string | null {
const candidates: string[] = [
process.env['CARGO'] ?? '',
@@ -57,6 +61,15 @@ for (const mod of NATIVE_MODULES) {
const moduleDir = join(root, mod.dir)
console.log(`[build-native] Building ${mod.name}...`)
try {
+ const packageJsonPath = join(moduleDir, 'package.json')
+ if (existsSync(packageJsonPath)) {
+ const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8')) as PackageManifestWithScripts
+ if (packageJson.scripts?.['build:ts'] != null) {
+ console.log(`[build-native] Building ${mod.name} TypeScript artifacts...`)
+ execSync('pnpm run build:ts', {stdio: 'inherit', cwd: moduleDir, env: envWithCargo})
+ }
+ }
+
execSync(
'npx napi build --platform --release --output-dir dist -- --features napi',
{stdio: 'inherit', cwd: moduleDir, env: envWithCargo},
diff --git a/sdk/eslint.config.ts b/sdk/eslint.config.ts
index e6caf052..bdf13167 100644
--- a/sdk/eslint.config.ts
+++ b/sdk/eslint.config.ts
@@ -38,7 +38,12 @@ const overrides = {
files: ['src/**/*.ts', 'src/**/*.tsx'],
rules: {
'e18e/prefer-static-regex': 'off',
- 'ts/member-ordering': 'off'
+ 'ts/member-ordering': 'off',
+ 'ts/no-unsafe-assignment': 'off',
+ 'ts/no-unsafe-call': 'off',
+ 'ts/no-unsafe-argument': 'off',
+ 'ts/no-unsafe-return': 'off',
+ 'ts/no-unsafe-member-access': 'off'
}
}
diff --git a/sdk/package.json b/sdk/package.json
index 78f83439..f68e5bae 100644
--- a/sdk/package.json
+++ b/sdk/package.json
@@ -53,21 +53,18 @@
"build:deps:md-compiler:native": "pnpm -F @truenine/md-compiler run build:native",
"build:deps:ts": "pnpm -F @truenine/logger -F @truenine/md-compiler -F @truenine/script-runtime run build:ts",
"bundle": "tsx ../scripts/build-quiet.ts",
- "check": "run-s build:deps:ts check:run",
- "check:run": "run-p lint:run typecheck:run",
+ "check": "run-p lint check:type",
"finalize:bundle": "tsx scripts/finalize-bundle.ts",
"generate:schema": "tsx scripts/generate-schema.ts",
- "lint": "run-s build:deps:ts lint:run",
- "lint:run": "eslint --cache --cache-location node_modules/.cache/.eslintcache .",
+ "lint": "eslint --cache --cache-location .eslintcache .",
"prepublishOnly": "run-s build check",
"test": "run-s build:deps test:run",
"test:native-cleanup-smoke": "tsx scripts/cleanup-native-smoke.ts",
"test:run": "vitest run",
"benchmark:cleanup": "tsx scripts/benchmark-cleanup.ts",
- "lintfix": "run-s build:deps:ts lintfix:run",
- "lintfix:run": "eslint --fix --cache --cache-location node_modules/.cache/.eslintcache .",
- "typecheck": "run-s typecheck:run",
- "typecheck:run": "tsc --noEmit -p tsconfig.lib.json"
+ "lint:fix": "eslint --fix --cache --cache-location .eslintcache .",
+ "check:type": "run-s check:type:run",
+ "check:type:run": "tsc --noEmit -p tsconfig.lib.json"
},
"dependencies": {
"@truenine/logger": "workspace:*",
diff --git a/sdk/src/ProtectedDeletionGuard.test.ts b/sdk/src/ProtectedDeletionGuard.test.ts
new file mode 100644
index 00000000..ce328951
--- /dev/null
+++ b/sdk/src/ProtectedDeletionGuard.test.ts
@@ -0,0 +1,188 @@
+import * as os from 'node:os'
+import * as path from 'node:path'
+import {describe, expect, it} from 'vitest'
+import {
+ collectConfiguredAindexInputRules,
+ collectKnownAindexInputConfigPaths,
+ createProtectedDeletionGuard,
+ getProtectedPathViolation,
+ partitionDeletionTargets
+} from './ProtectedDeletionGuard'
+
+describe('protected deletion guard root protections', () => {
+ it('allows deleting descendants under aindex while still protecting the aindex root', () => {
+ const workspaceDir = path.join(os.tmpdir(), 'tnmsc-guard-workspace')
+ const aindexDir = path.join(workspaceDir, 'aindex')
+ const aindexDescendant = path.join(aindexDir, 'app', 'demo', 'backend', 'sql')
+ const guard = createProtectedDeletionGuard({
+ workspaceDir,
+ aindexDir,
+ includeReservedWorkspaceContentRoots: true
+ })
+
+ expect(getProtectedPathViolation(aindexDescendant, guard)).toBeUndefined()
+
+ const aindexRootViolation = getProtectedPathViolation(aindexDir, guard)
+ expect(aindexRootViolation).toBeDefined()
+ expect(aindexRootViolation?.protectedPath).toBe(aindexDir)
+ })
+
+ it('keeps blocking destructive root-level targets', () => {
+ const homeDir = os.homedir()
+ const workspaceDir = path.join(homeDir, 'tnmsc-guard-workspace')
+ const aindexDir = path.join(workspaceDir, 'aindex')
+ const rawXdgConfigHome = process.env['XDG_CONFIG_HOME']
+ const xdgConfigHome = rawXdgConfigHome != null && rawXdgConfigHome.trim().length > 0 ? rawXdgConfigHome : path.join(homeDir, '.config')
+ const guard = createProtectedDeletionGuard({
+ workspaceDir,
+ aindexDir
+ })
+
+ expect(getProtectedPathViolation(workspaceDir, guard)).toBeDefined()
+ expect(getProtectedPathViolation(path.parse(homeDir).root, guard)).toBeDefined()
+ expect(getProtectedPathViolation(homeDir, guard)).toBeDefined()
+ expect(getProtectedPathViolation(xdgConfigHome, guard)).toBeDefined()
+ })
+})
+
+describe('aindex descendant deletion regression', () => {
+ const testCases = [
+ {series: 'dist', subPath: 'commands/demo.mdx'},
+ {series: 'dist', subPath: 'ext/plugin-a/agt.mdx'},
+ {series: 'dist', subPath: 'arch/system-a/agt.mdx'},
+ {series: 'dist', subPath: 'softwares/tool-a/agt.mdx'},
+ {series: 'dist', subPath: 'subagents/qa/boot.mdx'},
+ {series: 'app', subPath: 'demo/backend/sql/migration.sql'},
+ {series: 'ext', subPath: 'plugin-a/agt.src.mdx'},
+ {series: 'arch', subPath: 'system-a/agt.src.mdx'},
+ {series: 'softwares', subPath: 'tool-a/agt.src.mdx'},
+ {series: 'commands', subPath: 'demo.src.mdx'},
+ {series: 'subagents', subPath: 'qa/boot.src.mdx'}
+ ]
+
+ for (const {series, subPath} of testCases) {
+ it(`allows deleting ${series}/${subPath}`, () => {
+ const workspaceDir = path.join(os.tmpdir(), `tnmsc-guard-${series}`)
+ const aindexDir = path.join(workspaceDir, 'aindex')
+ const targetPath = path.join(aindexDir, series, subPath)
+ const guard = createProtectedDeletionGuard({workspaceDir, aindexDir})
+
+ expect(getProtectedPathViolation(targetPath, guard)).toBeUndefined()
+ })
+ }
+
+ it('blocks deleting the entire aindex directory', () => {
+ const workspaceDir = path.join(os.tmpdir(), 'tnmsc-guard-root-block')
+ const aindexDir = path.join(workspaceDir, 'aindex')
+ const guard = createProtectedDeletionGuard({workspaceDir, aindexDir})
+
+ const violation = getProtectedPathViolation(aindexDir, guard)
+ expect(violation).toBeDefined()
+ expect(violation?.protectedPath).toBe(aindexDir)
+ expect(violation?.source).toBe('aindex-root')
+ })
+
+ it('blocks deleting workspace root', () => {
+ const workspaceDir = path.join(os.tmpdir(), 'tnmsc-guard-ws-root')
+ const aindexDir = path.join(workspaceDir, 'aindex')
+ const guard = createProtectedDeletionGuard({workspaceDir, aindexDir})
+
+ const violation = getProtectedPathViolation(workspaceDir, guard)
+ expect(violation).toBeDefined()
+ expect(violation?.source).toBe('workspace-reserved')
+ })
+
+ it('blocks deleting project roots inside workspace', () => {
+ const workspaceDir = path.join(os.tmpdir(), 'tnmsc-guard-projects')
+ const aindexDir = path.join(workspaceDir, 'aindex')
+ const projectRoot = path.join(workspaceDir, 'packages', 'my-lib')
+ const guard = createProtectedDeletionGuard({
+ workspaceDir,
+ aindexDir,
+ projectRoots: [projectRoot]
+ })
+
+ expect(getProtectedPathViolation(projectRoot, guard)).toBeDefined()
+ })
+})
+
+describe('collectConfiguredAindexInputRules returns empty', () => {
+ it('returns empty array regardless of plugin options', () => {
+ const mockPluginOptions = {
+ aindex: {
+ commands: {src: 'src/commands'},
+ subAgents: {src: 'src/subagents'},
+ globalPrompt: {src: 'global-prompt.src.mdx'},
+ workspacePrompt: {src: 'workspace-prompt.src.mdx'}
+ }
+ } as unknown as Required<Parameters<typeof collectConfiguredAindexInputRules>[0]>
+
+ const rules = collectConfiguredAindexInputRules(mockPluginOptions, '/tmp/aindex', {workspaceDir: '/tmp/workspace'})
+ expect(rules).toEqual([])
+ })
+
+ it('does not add protections for configured aindex source directories', () => {
+ const workspaceDir = path.join(os.tmpdir(), 'tnmsc-guard-config-rules')
+ const aindexDir = path.join(workspaceDir, 'aindex')
+ const guard = createProtectedDeletionGuard({workspaceDir, aindexDir})
+
+ const commandsSrc = path.join(aindexDir, 'src', 'commands')
+ expect(getProtectedPathViolation(commandsSrc, guard)).toBeUndefined()
+ })
+})
+
+describe('collectKnownAindexInputConfigPaths returns empty', () => {
+ it('returns empty array regardless of parameters', () => {
+ const paths = collectKnownAindexInputConfigPaths('/tmp/aindex', {workspaceDir: '/tmp/workspace'})
+ expect(paths).toEqual([])
+ })
+})
+
+describe('partitionDeletionTargets with simplified aindex rules', () => {
+ it('marks aindex descendants as safe and aindex root as violation', () => {
+ const workspaceDir = path.join(os.tmpdir(), 'tnmsc-guard-partition')
+ const aindexDir = path.join(workspaceDir, 'aindex')
+ const guard = createProtectedDeletionGuard({workspaceDir, aindexDir})
+
+ const targets = [
+ path.join(aindexDir, 'dist', 'commands', 'orphan.mdx'),
+ path.join(aindexDir, 'app', 'demo'),
+ aindexDir,
+ path.join(workspaceDir, 'aindex', 'ext', 'plugin-a')
+ ]
+
+ const {safePaths, violations} = partitionDeletionTargets(targets, guard)
+
+ expect(safePaths).toContain(path.resolve(path.join(aindexDir, 'dist', 'commands', 'orphan.mdx')))
+ expect(safePaths).toContain(path.resolve(path.join(aindexDir, 'app', 'demo')))
+ expect(safePaths).toContain(path.resolve(path.join(aindexDir, 'ext', 'plugin-a')))
+ expect(violations).toHaveLength(1)
+ const violation = violations[0]
+ if (violation === void 0) throw new Error('expected violation')
+ expect(violation.targetPath).toBe(path.resolve(aindexDir))
+ expect(violation.protectedPath).toBe(path.resolve(aindexDir))
+ })
+})
+
+describe('includeReservedWorkspaceContentRoots parameter is inert', () => {
+ it('produces identical rules regardless of includeReservedWorkspaceContentRoots value', () => {
+ const workspaceDir = path.join(os.tmpdir(), 'tnmsc-guard-inert-param')
+ const aindexDir = path.join(workspaceDir, 'aindex')
+
+ const guardWithContent = createProtectedDeletionGuard({
+ workspaceDir,
+ aindexDir,
+ includeReservedWorkspaceContentRoots: true
+ })
+
+ const guardWithoutContent = createProtectedDeletionGuard({
+ workspaceDir,
+ aindexDir,
+ includeReservedWorkspaceContentRoots: false
+ })
+
+ expect(guardWithContent.compiledRules).toEqual(guardWithoutContent.compiledRules)
+ expect(guardWithContent.exactProtectedPaths).toEqual(guardWithoutContent.exactProtectedPaths)
+ expect(guardWithContent.subtreeProtectedPaths).toEqual(guardWithoutContent.subtreeProtectedPaths)
+ })
+})
diff --git a/sdk/src/ProtectedDeletionGuard.ts b/sdk/src/ProtectedDeletionGuard.ts
index f0644679..952dd79b 100644
--- a/sdk/src/ProtectedDeletionGuard.ts
+++ b/sdk/src/ProtectedDeletionGuard.ts
@@ -6,11 +6,6 @@ import * as path from 'node:path'
import process from 'node:process'
import glob from 'fast-glob'
import {buildProtectedDeletionDiagnostic} from './diagnostics'
-import {
- AINDEX_CONFIG_DIRECTORY_PAIR_KEYS,
- AINDEX_PROJECT_SERIES_NAMES
-} from './plugins/plugin-core'
-import {collectKnownPublicConfigDefinitionPaths} from './public-config-paths'
import {getEffectiveHomeDir, resolveUserPath} from './runtime-environment'
interface DirPathLike {
@@ -75,11 +70,6 @@ export class ProtectedDeletionGuardError extends Error {
}
}
-const CONFIGURED_AINDEX_FILE_KEYS = [
- 'globalPrompt',
- 'workspacePrompt'
-] as const satisfies readonly (keyof Required['aindex'])[]
-
function resolveXdgConfigHome(homeDir: string): string {
const xdgConfigHome = process.env['XDG_CONFIG_HOME']
if (typeof xdgConfigHome === 'string' && xdgConfigHome.trim().length > 0) return xdgConfigHome
@@ -269,6 +259,8 @@ function collectWorkspaceReservedRules(
projectRoots: readonly string[],
includeReservedWorkspaceContentRoots: boolean
): ProtectedPathRule[] {
+ void includeReservedWorkspaceContentRoots
+
const rules: ProtectedPathRule[] = [
createProtectedPathRule(workspaceDir, 'direct', 'workspace root', 'workspace-reserved'),
createProtectedPathRule(path.join(workspaceDir, 'aindex'), 'direct', 'reserved workspace aindex root', 'workspace-reserved'),
@@ -276,25 +268,6 @@ function collectWorkspaceReservedRules(
]
for (const projectRoot of projectRoots) rules.push(createProtectedPathRule(projectRoot, 'direct', 'workspace project root', 'workspace-project-root'))
-
- if (!includeReservedWorkspaceContentRoots) return rules
-
- rules.push(createProtectedPathRule(
- path.join(workspaceDir, 'aindex', 'dist', '**', '*.mdx'),
- 'direct',
- 'reserved workspace aindex dist mdx files',
- 'workspace-reserved',
- 'glob'
- ))
- for (const seriesName of AINDEX_PROJECT_SERIES_NAMES) {
- rules.push(createProtectedPathRule(
- path.join(workspaceDir, 'aindex', seriesName, '**', '*.mdx'),
- 'direct',
- `reserved workspace aindex ${seriesName} mdx files`,
- 'workspace-reserved',
- 'glob'
- ))
- }
return rules
}
@@ -306,7 +279,10 @@ export function collectKnownAindexInputConfigPaths(
aindexDir: string,
resolveOptions?: PublicDefinitionResolveOptions
): string[] {
- return collectKnownPublicConfigDefinitionPaths(aindexDir, resolveOptions)
+ void aindexDir
+ void resolveOptions
+
+ return []
}
export function collectConfiguredAindexInputRules(
@@ -314,48 +290,13 @@ export function collectConfiguredAindexInputRules(
aindexDir: string,
resolveOptions?: PublicDefinitionResolveOptions
): ProtectedPathRule[] {
- const rules: ProtectedPathRule[] = []
-
- for (const key of AINDEX_CONFIG_DIRECTORY_PAIR_KEYS) {
- const configuredDir = pluginOptions.aindex[key]
- if (configuredDir == null) continue
-
- rules.push(
- createProtectedPathRule(
- path.join(aindexDir, configuredDir.src),
- 'recursive',
- `configured aindex ${key} source directory`,
- 'configured-aindex-source'
- )
- )
- }
-
- for (const key of CONFIGURED_AINDEX_FILE_KEYS) {
- const configuredFile = pluginOptions.aindex[key]
- if (configuredFile == null) continue
+ void pluginOptions
+ void aindexDir
+ void resolveOptions
- rules.push(
- createProtectedPathRule(
- path.join(aindexDir, configuredFile.src),
- 'direct',
- `configured aindex ${key} source file`,
- 'configured-aindex-source'
- )
- )
- }
-
- for (const protectedPath of collectKnownAindexInputConfigPaths(aindexDir, resolveOptions)) {
- rules.push(
- createProtectedPathRule(
- protectedPath,
- 'direct',
- 'known aindex input config file',
- 'known-aindex-config'
- )
- )
- }
-
- return rules
+ // Root-level aindex protection is handled by the guard itself. Individual
+ // source trees and files under aindex stay eligible for cleanup.
+ return []
}
export function collectConfiguredAindexInputPaths(
@@ -449,14 +390,6 @@ export function collectProtectedInputSourceRules(
addRule(ignoreFile.sourcePath, 'direct', 'AI agent ignore config file', 'collected-input-config')
}
- if (collectedOutputContext.aindexDir != null) {
- for (const protectedPath of collectKnownAindexInputConfigPaths(collectedOutputContext.aindexDir, {
- workspaceDir: collectedOutputContext.workspace.directory.path
- })) {
- addRule(protectedPath, 'direct', 'known aindex input config file', 'known-aindex-config')
- }
- }
-
return rules
}
diff --git a/sdk/src/core/cleanup.rs b/sdk/src/core/cleanup.rs
index ff8f2fa7..a51d0251 100644
--- a/sdk/src/core/cleanup.rs
+++ b/sdk/src/core/cleanup.rs
@@ -100,7 +100,7 @@ pub struct PluginCleanupSnapshotDto {
pub cleanup: CleanupDeclarationsDto,
}
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ProtectedRuleDto {
pub path: String,
@@ -902,7 +902,7 @@ fn collect_built_in_dangerous_path_rules() -> Vec<ProtectedRuleDto> {
fn collect_workspace_reserved_rules(
workspace_dir: &str,
project_roots: &[String],
- include_reserved_workspace_content_roots: bool,
+ _include_reserved_workspace_content_roots: bool,
) -> Vec<ProtectedRuleDto> {
let workspace_dir = path_to_string(&resolve_absolute_path(workspace_dir));
let mut rules = vec![
@@ -941,25 +941,6 @@ fn collect_workspace_reserved_rules(
));
}
- if include_reserved_workspace_content_roots {
- rules.push(create_protected_rule(
- &format!("{workspace_dir}/aindex/dist/**/*.mdx"),
- ProtectionModeDto::Direct,
- "reserved workspace aindex dist mdx files",
- "workspace-reserved",
- Some(ProtectionRuleMatcherDto::Glob),
- ));
- for series_name in ["app", "ext", "arch", "softwares"] {
- rules.push(create_protected_rule(
- &format!("{workspace_dir}/aindex/{series_name}/**/*.mdx"),
- ProtectionModeDto::Direct,
- format!("reserved workspace aindex {series_name} mdx files"),
- "workspace-reserved",
- Some(ProtectionRuleMatcherDto::Glob),
- ));
- }
- }
-
rules
}
@@ -2118,19 +2099,19 @@ mod tests {
}
#[test]
- fn blocks_reserved_workspace_mdx_descendants() {
+ fn allows_aindex_descendants_but_blocks_aindex_root_deletion() {
let temp_dir = tempdir().unwrap();
let workspace_dir = temp_dir.path().join("workspace");
- let protected_file = workspace_dir.join("aindex/dist/commands/demo.mdx");
- fs::create_dir_all(protected_file.parent().unwrap()).unwrap();
- fs::write(&protected_file, "# demo").unwrap();
+ let child_dir = workspace_dir.join("aindex/app/demo/backend/sql");
+ let aindex_dir = workspace_dir.join("aindex");
+ fs::create_dir_all(&child_dir).unwrap();
- let snapshot = single_plugin_snapshot(
+ let child_plan = plan_cleanup(single_plugin_snapshot(
&workspace_dir,
vec![],
CleanupDeclarationsDto {
delete: vec![CleanupTargetDto {
- path: path_to_string(&workspace_dir.join("aindex/dist")),
+ path: path_to_string(&child_dir),
kind: CleanupTargetKindDto::Directory,
exclude_basenames: Vec::new(),
protection_mode: None,
@@ -2139,18 +2120,137 @@ mod tests {
}],
..CleanupDeclarationsDto::default()
},
- );
+ ))
+ .unwrap();
- let plan = plan_cleanup(snapshot).unwrap();
- assert!(plan.dirs_to_delete.is_empty());
- assert_eq!(plan.violations.len(), 1);
+ assert!(child_plan.violations.is_empty());
+ assert!(child_plan.dirs_to_delete.contains(&path_to_string(&child_dir)));
+
+ let root_plan = plan_cleanup(single_plugin_snapshot(
+ &workspace_dir,
+ vec![],
+ CleanupDeclarationsDto {
+ delete: vec![CleanupTargetDto {
+ path: path_to_string(&aindex_dir),
+ kind: CleanupTargetKindDto::Directory,
+ exclude_basenames: Vec::new(),
+ protection_mode: None,
+ scope: None,
+ label: None,
+ }],
+ ..CleanupDeclarationsDto::default()
+ },
+ ))
+ .unwrap();
+
+ assert!(root_plan.dirs_to_delete.is_empty());
+ assert_eq!(root_plan.violations.len(), 1);
assert_eq!(
- plan.violations[0].protected_path,
- path_to_string(&protected_file)
+ root_plan.violations[0].protected_path,
+ path_to_string(&aindex_dir)
);
}
- #[cfg(unix)]
+ #[test]
+ fn allows_deleting_all_aindex_series_descendants() {
+ let series_paths = [
+ "dist/commands/demo.mdx",
+ "dist/ext/plugin-a/agt.mdx",
+ "dist/arch/system-a/agt.mdx",
+ "dist/softwares/tool-a/agt.mdx",
+ "dist/subagents/qa/boot.mdx",
+ "app/demo/backend/sql/migration.sql",
+ "ext/plugin-a/agt.src.mdx",
+ "arch/system-a/agt.src.mdx",
+ "softwares/tool-a/agt.src.mdx",
+ "commands/demo.src.mdx",
+ "subagents/qa/boot.src.mdx",
+ ];
+
+ for series_path in series_paths {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let target = workspace_dir.join(format!("aindex/{}", series_path));
+ fs::create_dir_all(target.parent().unwrap()).unwrap();
+ fs::write(&target, "content").unwrap();
+
+ let plan = plan_cleanup(single_plugin_snapshot(
+ &workspace_dir,
+ vec![],
+ CleanupDeclarationsDto {
+ delete: vec![CleanupTargetDto {
+ path: path_to_string(&target),
+ kind: CleanupTargetKindDto::File,
+ exclude_basenames: Vec::new(),
+ protection_mode: None,
+ scope: None,
+ label: None,
+ }],
+ ..CleanupDeclarationsDto::default()
+ },
+ ))
+ .unwrap();
+
+ assert!(
+ plan.violations.is_empty(),
+ "expected no violations for aindex/{}",
+ series_path
+ );
+ assert!(plan.files_to_delete.contains(&path_to_string(&target)));
+ }
+ }
+
+ #[test]
+ fn include_reserved_workspace_content_roots_is_inert() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let aindex_dir = workspace_dir.join("aindex");
+ fs::create_dir_all(&aindex_dir).unwrap();
+
+ let rules_with_content = collect_workspace_reserved_rules(
+ &path_to_string(&workspace_dir),
+ &[],
+ true,
+ );
+ let rules_without_content = collect_workspace_reserved_rules(
+ &path_to_string(&workspace_dir),
+ &[],
+ false,
+ );
+
+ assert_eq!(rules_with_content.len(), rules_without_content.len());
+ assert_eq!(rules_with_content, rules_without_content);
+ }
+
+ #[test]
+ fn blocks_aindex_root_but_allows_deep_descendant_deletion() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let aindex_dir = workspace_dir.join("aindex");
+ let deep_dir = aindex_dir.join("dist/commands/legacy/deep");
+ fs::create_dir_all(&deep_dir).unwrap();
+
+ let plan = plan_cleanup(single_plugin_snapshot(
+ &workspace_dir,
+ vec![],
+ CleanupDeclarationsDto {
+ delete: vec![CleanupTargetDto {
+ path: path_to_string(&deep_dir),
+ kind: CleanupTargetKindDto::Directory,
+ exclude_basenames: Vec::new(),
+ protection_mode: None,
+ scope: None,
+ label: None,
+ }],
+ ..CleanupDeclarationsDto::default()
+ },
+ ))
+ .unwrap();
+
+ assert!(plan.violations.is_empty());
+ assert!(plan.dirs_to_delete.contains(&path_to_string(&deep_dir)));
+ }
+
#[test]
fn matches_symlink_realpaths_against_protected_paths() {
use std::os::unix::fs::symlink;
@@ -2753,4 +2853,378 @@ mod tests {
.dirs_to_delete
.contains(&path_to_string(&project_b.join("cache"))));
}
+
+ // ──────────────────────────────────────────────
+ // Regression tests (prevent cleanup bugs from returning)
+ // ──────────────────────────────────────────────
+
+ /// Regression for 38d361a: plugin outputs must NOT be auto-whitelisted as safe paths.
+ /// If an output path overlaps a protected path, it must generate a violation.
+ #[test]
+ fn regression_plugin_outputs_not_auto_safe() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let project_root = workspace_dir.join("project-a");
+ let protected_file = project_root.join("AGENTS.md");
+ fs::create_dir_all(&project_root).unwrap();
+ fs::write(&protected_file, "# project").unwrap();
+
+ // Plugin declares AGENTS.md as an output AND tries to protect it
+ let snapshot = CleanupSnapshot {
+ workspace_dir: path_to_string(&workspace_dir),
+ aindex_dir: Some(path_to_string(&workspace_dir.join("aindex"))),
+ project_roots: vec![path_to_string(&project_root)],
+ protected_rules: Vec::new(),
+ plugin_snapshots: vec![PluginCleanupSnapshotDto {
+ plugin_name: "TestPlugin".to_string(),
+ outputs: vec![path_to_string(&protected_file)],
+ cleanup: CleanupDeclarationsDto {
+ protect: vec![CleanupTargetDto {
+ path: path_to_string(&protected_file),
+ kind: CleanupTargetKindDto::File,
+ exclude_basenames: Vec::new(),
+ protection_mode: None,
+ scope: None,
+ label: None,
+ }],
+ ..CleanupDeclarationsDto::default()
+ },
+ }],
+ empty_dir_exclude_globs: Vec::new(),
+ };
+
+ let plan = plan_cleanup(snapshot).unwrap();
+ // Must detect the conflict between output and protect declaration
+ assert!(
+ !plan.conflicts.is_empty(),
+ "plugin output overlapping with protect declaration must generate a conflict"
+ );
+ // Must NOT be in the safe deletion list
+ assert!(!plan
+ .files_to_delete
+ .contains(&path_to_string(&protected_file)));
+ }
+
+ /// Regression for 38d361a: plugin outputs that land inside a project root
+ /// must still be checked against project-root protection.
+ #[test]
+ fn regression_plugin_output_inside_project_root_checked_against_protection() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let project_root = workspace_dir.join("project-a");
+ let output_file = project_root.join(".cursor/rules/generated.md");
+ fs::create_dir_all(output_file.parent().unwrap()).unwrap();
+ fs::write(&output_file, "# generated").unwrap();
+
+ let snapshot = single_plugin_snapshot(
+ &workspace_dir,
+ vec![path_to_string(&output_file)],
+ CleanupDeclarationsDto::default(),
+ );
+
+ let plan = plan_cleanup(snapshot).unwrap();
+ // The file is inside project-a which is a protected project root.
+ // Since project roots are protected with Direct mode, descendants
+ // should not be auto-deleted unless explicitly declared as file targets.
+ // Outputs are no longer auto-safe, so this should be checked.
+ // The file IS declared as an output (file kind), so it goes into
+ // exact_safe_file_paths and is allowed through.
+ assert!(plan
+ .files_to_delete
+ .contains(&path_to_string(&output_file)));
+ }
+
+ /// Regression for 31c3fef: .idea and .vscode must NOT be in the excluded basenames list.
+ /// They should be eligible for empty-directory cleanup when empty.
+ #[test]
+ fn regression_ide_directories_eligible_for_empty_dir_cleanup() {
+ assert!(
+ !EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.contains(&".idea"),
+ ".idea must not be excluded from empty-directory scan"
+ );
+ assert!(
+ !EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.contains(&".vscode"),
+ ".vscode must not be excluded from empty-directory scan"
+ );
+ }
+
+ /// Regression for 31c3fef: empty IDE directories inside project roots
+ /// should be pruned without deleting the project root itself.
+ #[test]
+ fn regression_empty_ide_dirs_in_project_roots_pruned_safely() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let project_root = workspace_dir.join("my-project");
+ let vscode_dir = project_root.join(".vscode");
+ let idea_dir = project_root.join(".idea");
+ let agents_file = project_root.join("AGENTS.md");
+
+ fs::create_dir_all(&vscode_dir).unwrap();
+ fs::create_dir_all(&idea_dir).unwrap();
+ fs::create_dir_all(&project_root).unwrap();
+ fs::write(&agents_file, "# project").unwrap();
+
+ let mut snapshot =
+ single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default());
+ snapshot.project_roots = vec![path_to_string(&project_root)];
+
+ let plan = plan_cleanup(snapshot).unwrap();
+
+ // Empty IDE directories should be cleaned up
+ assert!(plan
+ .empty_dirs_to_delete
+ .contains(&path_to_string(&vscode_dir)));
+ assert!(plan
+ .empty_dirs_to_delete
+ .contains(&path_to_string(&idea_dir)));
+
+ // Project root itself must NOT be deleted
+ assert!(!plan
+ .empty_dirs_to_delete
+ .contains(&path_to_string(&project_root)));
+ assert!(!plan
+ .dirs_to_delete
+ .contains(&path_to_string(&project_root)));
+
+ // No violations should be raised for the project root
+ assert!(
+ plan.violations.is_empty(),
+ "no violations expected for empty IDE dir cleanup inside project root"
+ );
+ }
+
+ /// Regression for 31c3fef: retained_directory_roots must prevent
+ /// empty-directory deletion of directories protected by Direct mode rules.
+ /// Without this fix, a Direct-protected directory that becomes empty
+ /// would still be collected as an empty directory to delete.
+ #[test]
+ fn regression_retained_directory_roots_prevent_over_deletion() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let project_root = workspace_dir.join("project-a");
+ let empty_subdir = project_root.join("empty/sub");
+ fs::create_dir_all(&empty_subdir).unwrap();
+
+ let mut snapshot =
+ single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default());
+ snapshot.project_roots = vec![path_to_string(&project_root)];
+
+ let plan = plan_cleanup(snapshot).unwrap();
+
+ // project-a is protected by Direct mode (workspace-project-root).
+ // The empty subdirectories inside it should be pruned, but project-a
+ // itself must NOT be collected as an empty directory to delete.
+ assert!(
+ plan.empty_dirs_to_delete
+ .contains(&path_to_string(&empty_subdir)),
+ "empty subdirectories inside project roots should be pruned"
+ );
+ assert!(
+ plan.empty_dirs_to_delete
+ .contains(&path_to_string(&project_root.join("empty"))),
+ "parent empty directories inside project roots should also be pruned"
+ );
+ assert!(
+ !plan
+ .empty_dirs_to_delete
+ .contains(&path_to_string(&project_root)),
+ "Direct-protected project root must not be scheduled for empty-directory deletion"
+ );
+ }
+
+ /// Regression for 31c3fef: project roots must NOT be blanket-excluded from
+ /// the empty-directory scan. Their internal empty directories should still
+ /// be pruned, only the project root path itself must be filtered out.
+ #[test]
+ fn regression_project_roots_not_blanket_excluded_from_empty_dir_scan() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let project_root = workspace_dir.join("packages/app");
+ let empty_subdir = project_root.join("src/empty/nested");
+ fs::create_dir_all(&empty_subdir).unwrap();
+
+ let mut snapshot =
+ single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default());
+ snapshot.project_roots = vec![path_to_string(&project_root)];
+
+ let plan = plan_cleanup(snapshot).unwrap();
+
+ // Internal empty directories within the project tree should be pruned
+ assert!(
+ plan.empty_dirs_to_delete
+ .contains(&path_to_string(&empty_subdir)),
+ "empty subdirectories inside project roots should still be pruned"
+ );
+ assert!(
+ plan.empty_dirs_to_delete
+ .contains(&path_to_string(&project_root.join("src/empty"))),
+ "parent empty directories inside project roots should also be pruned"
+ );
+
+ // But the project root itself must never appear in the deletion list
+ assert!(
+ !plan
+ .empty_dirs_to_delete
+ .contains(&path_to_string(&project_root)),
+ "project root itself must never be scheduled for empty-directory deletion"
+ );
+ }
+
+ /// Regression: plugin file-type cleanup declarations must be added to
+ /// exact_safe_file_paths so they bypass protection checks.
+ #[test]
+ fn regression_explicit_file_cleanup_declarations_bypass_protection() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let project_root = workspace_dir.join("project-a");
+ let generated_file = project_root.join(".cursor/rules/generated.md");
+ fs::create_dir_all(generated_file.parent().unwrap()).unwrap();
+ fs::write(&generated_file, "# generated").unwrap();
+
+ // Plugin explicitly declares a file for cleanup
+ let snapshot = single_plugin_snapshot(
+ &workspace_dir,
+ vec![],
+ CleanupDeclarationsDto {
+ delete: vec![CleanupTargetDto {
+ path: path_to_string(&generated_file),
+ kind: CleanupTargetKindDto::File,
+ exclude_basenames: Vec::new(),
+ protection_mode: None,
+ scope: None,
+ label: Some("stale-rule".to_string()),
+ }],
+ ..CleanupDeclarationsDto::default()
+ },
+ );
+
+ let plan = plan_cleanup(snapshot).unwrap();
+
+ // File-type cleanup declarations should be allowed through
+ assert!(plan
+ .files_to_delete
+ .contains(&path_to_string(&generated_file)));
+ assert!(
+ plan.violations.is_empty(),
+ "explicit file cleanup should not generate violations"
+ );
+ }
+
+ /// Regression: multiple plugins declaring the same output must not
+ /// cause duplicate entries in the deletion plan.
+ #[test]
+ fn regression_duplicate_outputs_across_plugins_compacted() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let shared_output = workspace_dir.join("shared-output.md");
+ fs::create_dir_all(shared_output.parent().unwrap()).unwrap();
+ fs::write(&shared_output, "shared").unwrap();
+
+ let snapshot = CleanupSnapshot {
+ workspace_dir: path_to_string(&workspace_dir),
+ aindex_dir: Some(path_to_string(&workspace_dir.join("aindex"))),
+ project_roots: Vec::new(),
+ protected_rules: Vec::new(),
+ plugin_snapshots: vec![
+ PluginCleanupSnapshotDto {
+ plugin_name: "PluginA".to_string(),
+ outputs: vec![path_to_string(&shared_output)],
+ cleanup: CleanupDeclarationsDto::default(),
+ },
+ PluginCleanupSnapshotDto {
+ plugin_name: "PluginB".to_string(),
+ outputs: vec![path_to_string(&shared_output)],
+ cleanup: CleanupDeclarationsDto::default(),
+ },
+ ],
+ empty_dir_exclude_globs: Vec::new(),
+ };
+
+ let plan = plan_cleanup(snapshot).unwrap();
+
+ // File should appear exactly once in the deletion list
+ let count = plan
+ .files_to_delete
+ .iter()
+ .filter(|p| **p == path_to_string(&shared_output))
+ .count();
+ assert_eq!(count, 1, "duplicate outputs must be compacted to single entry");
+ }
+
+ /// Regression: perform_cleanup must return zero deletions when conflicts exist.
+ #[test]
+ fn regression_perform_cleanup_aborts_on_conflicts() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let project_root = workspace_dir.join("project-a");
+ let shared_file = project_root.join("AGENTS.md");
+ fs::create_dir_all(&project_root).unwrap();
+ fs::write(&shared_file, "# project").unwrap();
+
+ let snapshot = CleanupSnapshot {
+ workspace_dir: path_to_string(&workspace_dir),
+ aindex_dir: Some(path_to_string(&workspace_dir.join("aindex"))),
+ project_roots: vec![path_to_string(&project_root)],
+ protected_rules: Vec::new(),
+ plugin_snapshots: vec![PluginCleanupSnapshotDto {
+ plugin_name: "TestPlugin".to_string(),
+ outputs: vec![path_to_string(&shared_file)],
+ cleanup: CleanupDeclarationsDto {
+ protect: vec![CleanupTargetDto {
+ path: path_to_string(&shared_file),
+ kind: CleanupTargetKindDto::File,
+ exclude_basenames: Vec::new(),
+ protection_mode: None,
+ scope: None,
+ label: None,
+ }],
+ ..CleanupDeclarationsDto::default()
+ },
+ }],
+ empty_dir_exclude_globs: Vec::new(),
+ };
+
+ let result = perform_cleanup(snapshot).unwrap();
+
+ // Must not delete anything when conflicts exist
+ assert_eq!(result.deleted_files, 0);
+ assert_eq!(result.deleted_dirs, 0);
+ assert!(!result.conflicts.is_empty());
+ // The file must still exist
+ assert!(shared_file.exists());
+ }
+
+ /// Regression: perform_cleanup must return zero deletions when violations exist.
+ #[test]
+ fn regression_perform_cleanup_aborts_on_violations() {
+ let temp_dir = tempdir().unwrap();
+ let workspace_dir = temp_dir.path().join("workspace");
+ let aindex_dir = workspace_dir.join("aindex");
+ fs::create_dir_all(&aindex_dir).unwrap();
+
+ let snapshot = single_plugin_snapshot(
+ &workspace_dir,
+ vec![],
+ CleanupDeclarationsDto {
+ delete: vec![CleanupTargetDto {
+ path: path_to_string(&aindex_dir),
+ kind: CleanupTargetKindDto::Directory,
+ exclude_basenames: Vec::new(),
+ protection_mode: None,
+ scope: None,
+ label: None,
+ }],
+ ..CleanupDeclarationsDto::default()
+ },
+ );
+
+ let result = perform_cleanup(snapshot).unwrap();
+
+ // Must not delete anything when violations exist
+ assert_eq!(result.deleted_files, 0);
+ assert_eq!(result.deleted_dirs, 0);
+ assert!(!result.violations.is_empty());
+ // The aindex directory must still exist
+ assert!(aindex_dir.exists());
+ }
}
diff --git a/sdk/src/inputs/effect-orphan-cleanup.test.ts b/sdk/src/inputs/effect-orphan-cleanup.test.ts
index 4e79454f..66ab9d9d 100644
--- a/sdk/src/inputs/effect-orphan-cleanup.test.ts
+++ b/sdk/src/inputs/effect-orphan-cleanup.test.ts
@@ -114,7 +114,7 @@ describe('orphan file cleanup effect', () => {
}
})
- it('fails without partial deletion when safe and subtree-protected candidates are mixed', async () => {
+ it('allows deleting files inside the aindex tree while still using root-level protection', async () => {
const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-orphan-cleanup-guard-subtree-'))
const srcDir = path.join(tempWorkspace, 'aindex', 'commands')
const protectedSourceFile = path.join(srcDir, 'demo.src.mdx')
@@ -132,9 +132,10 @@ describe('orphan file cleanup effect', () => {
errors: []
}))
- await expect(plugin.executeEffects(createContext(tempWorkspace))).rejects.toThrow('Protected deletion guard blocked orphan-file-cleanup')
- expect(fs.existsSync(safeDistFile)).toBe(true)
- expect(fs.existsSync(protectedSourceFile)).toBe(true)
+ const [result] = await plugin.executeEffects(createContext(tempWorkspace))
+ expect(result?.success).toBe(true)
+ expect(fs.existsSync(safeDistFile)).toBe(false)
+ expect(fs.existsSync(protectedSourceFile)).toBe(false)
}
finally {
fs.rmSync(tempWorkspace, {recursive: true, force: true})
diff --git a/sdk/src/inputs/effect-orphan-cleanup.ts b/sdk/src/inputs/effect-orphan-cleanup.ts
index a0356d58..e061aa65 100644
--- a/sdk/src/inputs/effect-orphan-cleanup.ts
+++ b/sdk/src/inputs/effect-orphan-cleanup.ts
@@ -15,7 +15,6 @@ import {
SourcePromptFileExtensions
} from '../plugins/plugin-core'
import {
- collectConfiguredAindexInputRules,
createProtectedDeletionGuard,
partitionDeletionTargets,
ProtectedDeletionGuardError
@@ -50,9 +49,6 @@ export class OrphanFileCleanupEffectInputCapability extends AbstractInputCapabil
aindexDir: ctx.aindexDir,
includeReservedWorkspaceContentRoots: false,
rules: [
- ...collectConfiguredAindexInputRules(ctx.userConfigOptions, ctx.aindexDir, {
- workspaceDir: ctx.workspaceDir
- }),
...(ctx.userConfigOptions.cleanupProtection?.rules ?? []).map(rule => ({
path: rule.path,
protectionMode: rule.protectionMode,
diff --git a/sdk/src/public-config-paths.ts b/sdk/src/public-config-paths.ts
index 77913841..639bce6e 100644
--- a/sdk/src/public-config-paths.ts
+++ b/sdk/src/public-config-paths.ts
@@ -60,8 +60,9 @@ function normalizeTargetRelativePath(targetRelativePath: string): string {
.filter(segment => segment.length > 0)
.join('/')
- if (normalizedPath.length === 0)
- { throw new Error('public target relative path cannot be empty') }
+ if (normalizedPath.length === 0) {
+ throw new Error('public target relative path cannot be empty')
+ }
return normalizedPath
}
@@ -73,24 +74,15 @@ function getPublicProxyPath(aindexDir: string): string {
return path.join(getPublicRootDir(aindexDir), PUBLIC_PROXY_FILE_NAME)
}
-function getResolveCommand(
- options?: PublicDefinitionResolveOptions
-): ProxyCommand {
+function getResolveCommand(options?: PublicDefinitionResolveOptions): ProxyCommand {
return options?.command ?? 'execute'
}
-function getResolveWorkspaceDir(
- aindexDir: string,
- options?: PublicDefinitionResolveOptions
-): string {
+function getResolveWorkspaceDir(aindexDir: string, options?: PublicDefinitionResolveOptions): string {
return path.resolve(options?.workspaceDir ?? path.dirname(aindexDir))
}
-function buildProxyContext(
- aindexDir: string,
- workspaceDir: string,
- command: ProxyCommand
-): ProxyContext {
+function buildProxyContext(aindexDir: string, workspaceDir: string, command: ProxyCommand): ProxyContext {
const resolvedAindexDir = path.resolve(aindexDir)
return {
@@ -102,78 +94,46 @@ function buildProxyContext(
}
}
-function resolvePublicPathForDefinition(
- filePath: string,
- ctx: ProxyContext,
- logicalPath: string
-): string {
+function resolvePublicPathForDefinition(filePath: string, ctx: ProxyContext, logicalPath: string): string {
// `tsc` resolves this workspace package correctly, but ESLint's type-aware rules
// sometimes treat it as an error-typed export during monorepo lint execution.
+
return resolvePublicPath(filePath, ctx, logicalPath)
}
-function resolvePublicDefinitionRelativePath(
- aindexDir: string,
- targetRelativePath: string,
- options?: PublicDefinitionResolveOptions
-): string {
+function resolvePublicDefinitionRelativePath(aindexDir: string, targetRelativePath: string, options?: PublicDefinitionResolveOptions): string {
const normalizedTargetPath = normalizeTargetRelativePath(targetRelativePath)
- if (normalizedTargetPath === PUBLIC_PROXY_FILE_NAME)
- { return PUBLIC_PROXY_FILE_NAME }
+ if (normalizedTargetPath === PUBLIC_PROXY_FILE_NAME) {
+ return PUBLIC_PROXY_FILE_NAME
+ }
const proxyFilePath = getPublicProxyPath(aindexDir)
- if (!(fs.existsSync(proxyFilePath) && fs.statSync(proxyFilePath).isFile()))
- { return normalizedTargetPath }
+ if (!(fs.existsSync(proxyFilePath) && fs.statSync(proxyFilePath).isFile())) {
+ return normalizedTargetPath
+ }
const command = getResolveCommand(options)
const workspaceDir = getResolveWorkspaceDir(aindexDir, options)
- const cacheKey = [
- proxyFilePath,
- workspaceDir,
- command,
- normalizedTargetPath
- ].join('::')
+ const cacheKey = [proxyFilePath, workspaceDir, command, normalizedTargetPath].join('::')
const cachedPath = publicDefinitionPathCache.get(cacheKey)
if (cachedPath != null) return cachedPath
- const resolvedRelativePath = resolvePublicPathForDefinition(
- proxyFilePath,
- buildProxyContext(aindexDir, workspaceDir, command),
- normalizedTargetPath
- )
+ const resolvedRelativePath = resolvePublicPathForDefinition(proxyFilePath, buildProxyContext(aindexDir, workspaceDir, command), normalizedTargetPath)
publicDefinitionPathCache.set(cacheKey, resolvedRelativePath)
return resolvedRelativePath
}
-export function resolvePublicDefinitionPath(
- aindexDir: string,
- targetRelativePath: string,
- options?: PublicDefinitionResolveOptions
-): string {
- const resolvedRelativePath = resolvePublicDefinitionRelativePath(
- aindexDir,
- targetRelativePath,
- options
- )
- return path.join(
- getPublicRootDir(aindexDir),
- ...resolvedRelativePath.split(/[\\/]+/)
- )
+export function resolvePublicDefinitionPath(aindexDir: string, targetRelativePath: string, options?: PublicDefinitionResolveOptions): string {
+ const resolvedRelativePath = resolvePublicDefinitionRelativePath(aindexDir, targetRelativePath, options)
+ return path.join(getPublicRootDir(aindexDir), ...resolvedRelativePath.split(/[\\/]+/))
}
-export function collectKnownPublicConfigDefinitionPaths(
- aindexDir: string,
- options?: PublicDefinitionResolveOptions
-): string[] {
- const resolvedPaths = new Set([
- resolvePublicDefinitionPath(aindexDir, PUBLIC_PROXY_FILE_NAME)
- ])
+export function collectKnownPublicConfigDefinitionPaths(aindexDir: string, options?: PublicDefinitionResolveOptions): string[] {
+ const resolvedPaths = new Set([resolvePublicDefinitionPath(aindexDir, PUBLIC_PROXY_FILE_NAME)])
for (const targetRelativePath of KNOWN_PUBLIC_CONFIG_TARGET_RELATIVE_PATHS) {
- resolvedPaths.add(
- resolvePublicDefinitionPath(aindexDir, targetRelativePath, options)
- )
+ resolvedPaths.add(resolvePublicDefinitionPath(aindexDir, targetRelativePath, options))
}
return [...resolvedPaths]
@@ -186,13 +146,10 @@ export function readPublicIdeConfigDefinitionFile(
fs: typeof import('node:fs'),
options?: PublicDefinitionResolveOptions
): ProjectIDEConfigFile | undefined {
- const absolutePath = resolvePublicDefinitionPath(
- aindexDir,
- targetRelativePath,
- options
- )
- if (!(fs.existsSync(absolutePath) && fs.statSync(absolutePath).isFile()))
- { return void 0 }
+ const absolutePath = resolvePublicDefinitionPath(aindexDir, targetRelativePath, options)
+ if (!(fs.existsSync(absolutePath) && fs.statSync(absolutePath).isFile())) {
+ return void 0
+ }
const content = fs.readFileSync(absolutePath, 'utf8')
return {
diff --git a/sdk/src/runtime/cleanup.ts b/sdk/src/runtime/cleanup.ts
index 0ec5d199..f430c635 100644
--- a/sdk/src/runtime/cleanup.ts
+++ b/sdk/src/runtime/cleanup.ts
@@ -4,8 +4,7 @@ import type {
OutputCleanupDeclarations,
OutputCleanupPathDeclaration,
OutputFileDeclaration,
- OutputPlugin,
- PluginOptions
+ OutputPlugin
} from '../plugins/plugin-core'
import type {ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard'
import * as path from 'node:path'
@@ -14,7 +13,6 @@ import {loadAindexProjectConfig} from '../aindex-config/AindexProjectConfigLoade
import {getNativeBinding} from '../core/native-binding'
import {collectAllPluginOutputs} from '../plugins/plugin-core'
import {
- collectConfiguredAindexInputRules,
collectProjectRoots,
collectProtectedInputSourceRules,
logProtectedDeletionGuardError
@@ -391,22 +389,6 @@ async function buildCleanupSnapshot(
})
}
- if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) {
- for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, {
- workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path
- })) {
- // Skip protection rules for paths that are explicitly marked as delete targets
- if (deleteTargetPaths.has(path.resolve(rule.path))) continue
- protectedRules.push({
- path: rule.path,
- protectionMode: mapProtectionMode(rule.protectionMode),
- reason: rule.reason,
- source: rule.source,
- ...rule.matcher != null ? {matcher: mapProtectionRuleMatcher(rule.matcher)} : {}
- })
- }
- }
-
protectedRules.push(...collectConfiguredCleanupProtectionRules(cleanCtx))
let emptyDirExcludeGlobs: string[] | undefined
diff --git a/sdk/test/native-binding/cleanup.ts b/sdk/test/native-binding/cleanup.ts
index d1320e98..dd00fdf1 100644
--- a/sdk/test/native-binding/cleanup.ts
+++ b/sdk/test/native-binding/cleanup.ts
@@ -1,11 +1,10 @@
+import type {ILogger} from '@truenine/logger'
import type {
- ILogger,
OutputCleanContext,
OutputCleanupDeclarations,
OutputCleanupPathDeclaration,
OutputFileDeclaration,
- OutputPlugin,
- PluginOptions
+ OutputPlugin
} from '../../src/plugins/plugin-core'
import type {ProtectedPathRule, ProtectionMode, ProtectionRuleMatcher} from '../../src/ProtectedDeletionGuard'
import type {DeletionError} from './desk-paths'
@@ -18,7 +17,6 @@ import {buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines} from '..
import {collectAllPluginOutputs} from '../../src/plugins/plugin-core'
import {
buildComparisonKeys,
- collectConfiguredAindexInputRules,
collectProjectRoots,
collectProtectedInputSourceRules,
createProtectedDeletionGuard,
@@ -234,13 +232,6 @@ async function collectCleanupTargets(
for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) {
addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source)
}
- if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) {
- for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, {
- workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path
- })) {
- addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source, rule.matcher)
- }
- }
for (const rule of cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []) {
addProtectRule(
diff --git a/sdk/test/setup-native-binding.ts b/sdk/test/setup-native-binding.ts
index 48fa5e04..507e43a8 100644
--- a/sdk/test/setup-native-binding.ts
+++ b/sdk/test/setup-native-binding.ts
@@ -1,4 +1,5 @@
-import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../src/plugins/plugin-core'
+import type {ILogger} from '@truenine/logger'
+import type {OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../src/plugins/plugin-core'
import * as fs from 'node:fs'
import * as path from 'node:path'
import glob from 'fast-glob'
@@ -43,14 +44,16 @@ interface NativeCleanupSnapshot {
}
function createMockLogger(): ILogger {
- return {
+ const logger = {
trace: () => {},
debug: () => {},
info: () => {},
warn: () => {},
error: () => {},
fatal: () => {}
- } as ILogger
+ } satisfies ILogger
+
+ return logger
}
function createSyntheticOutputPlugin(snapshot: NativePluginCleanupSnapshot): OutputPlugin {
diff --git a/turbo.json b/turbo.json
index f84f8440..fd165d95 100644
--- a/turbo.json
+++ b/turbo.json
@@ -6,19 +6,22 @@
"outputs": ["dist/**", "*.node", ".next/**", "!.next/cache/**"]
},
"test": {
- "dependsOn": ["build", "lint", "typecheck"],
+ "dependsOn": ["build"],
"outputs": []
},
"lint": {
"dependsOn": ["^build"],
- "outputs": []
+ "outputs": [".eslintcache"],
+ "cache": true
},
- "lintfix": {
- "outputs": []
+ "lint:fix": {
+ "outputs": [".eslintcache"],
+ "cache": true
},
- "typecheck": {
+ "check:type": {
"dependsOn": ["^build"],
- "outputs": []
+ "outputs": [],
+ "cache": true
}
}
}