diff --git a/.env.example b/.env.example
index 42ef528..4c18a82 100644
--- a/.env.example
+++ b/.env.example
@@ -23,6 +23,9 @@ NEXUS_COLLAB_SERVER_PORT=1234
# Public WebSocket URL that browsers use to connect to the collab server.
NEXT_PUBLIC_COLLAB_SERVER_URL=ws://localhost:1234
+# Persistent Documents Skill Library directory. Defaults to ./.nexus-library when unset.
+NEXUS_LIBRARY_DATA_DIR=/path/to/nexus-library-data
+
# ── Authentication (OIDC/OAuth2) ─────────────────────────────────────────────
# When all four AUTH_* variables below are set, Nexus requires SSO login.
# When absent, the app is open (no authentication).
diff --git a/.gitignore b/.gitignore
index 84ea765..04a89c4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -74,7 +74,7 @@ _workspace
/graphify-out
_workspace*
-# Local runtime data stores (Brain / collab)
+# Local runtime data stores (Brain / collab / library)
/.nexus-brain/
/.nexus-collab/
-
+/.nexus-library/
diff --git a/README.md b/README.md
index b2fe9eb..ac1c0fd 100644
--- a/README.md
+++ b/README.md
@@ -28,6 +28,22 @@ Nexus is a visual workflow editor for designing, composing, and exporting AI wor
- Export generated files as a ZIP or write them directly into a target folder
- Include generated `run-<workflow>.sh` and `run-<workflow>.bat` helper scripts with exported workflow artifacts
+### 📦 Documents Skill Library
+
+The Documents Skill Library is a versioned, sharable home for Markdown skills:
+
+- **Workspace** and **user-local** library scopes per workspace
+- **Packs** group related skills, references, docs, rules, templates, examples, and assets
+- **Real-time Markdown editing** of skill content through the Documents panel, backed by the same Hocuspocus collaboration server used for the canvas
+- **Branch + fork**: derive a user-local fork from a workspace pack, three-way merge updates from base when needed, and resolve conflicts inline
+- **Publish** at both pack-version (semver) and skill-version granularity; published versions are immutable snapshots
+- **Workflow Skill nodes** can reference a library skill by `scope + packId + packVersion + skillId`. The reference is resolved live from the library or pinned to a published version
+- **`.nexus` archive export**: a self-contained zip including `manifest.json`, `workflow.json`, every reachable pack's `manifest.json`, every referenced document's content, `runtime/resolver-metadata.json`, and `hashes.json` for integrity verification
+- **Import** of `.nexus` archives and best-effort import of Agent Skills folders/zips
+
+Open the **Library** button in the editor header to access the panel. Persistent
+storage lives under `NEXUS_LIBRARY_DATA_DIR` (defaults to `./.nexus-library`).
+
### 📝 Content and Agent Authoring
- Fullscreen editing for prompts and documents
diff --git a/docker-compose.yml b/docker-compose.yml
index db86ce6..308236b 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -19,6 +19,7 @@ services:
HOSTNAME: "0.0.0.0"
NEXUS_BRAIN_DATA_DIR: /data/brain
NEXUS_COLLAB_DATA_DIR: /data/collab
+ NEXUS_LIBRARY_DATA_DIR: /data/library
# Do NOT set NEXT_PUBLIC_COLLAB_SERVER_URL — leave unset so the browser
# auto-resolves to same-origin `wss://<host>/collab`. Setting it bakes
# a fixed URL into the JS bundle at build time.
@@ -32,9 +33,11 @@ services:
- "3000"
volumes:
- nexus_brain_data:/data/brain
+ - nexus_library_data:/data/library
- nexus_collab_data:/data/collab
restart: unless-stopped
volumes:
nexus_brain_data:
nexus_collab_data:
+ nexus_library_data:
diff --git a/docs/tasks/conditional_docs.md b/docs/tasks/conditional_docs.md
index 3cdfed6..39f56f1 100644
--- a/docs/tasks/conditional_docs.md
+++ b/docs/tasks/conditional_docs.md
@@ -19,3 +19,15 @@
- When modifying `src/app/api/workspaces/*`, `src/lib/workspace/*`, `src/components/workspace/*`, or `src/app/workspace/[id]/**`
- When changing routing between `/`, `/editor`, and `/workspace/[id]/workflow/[wid]`
- When touching workspace auto-save, recent-workspace `localStorage` history, or stable Y.js room IDs for workspace workflows
+- docs/tasks/documents-skill-library/doc-documents-skill-library.md
+ - Conditions:
+ - When working with the Documents Skill Library (packs, skills, documents, publish, fork/merge, validation, .nexus export/import)
+ - When modifying `src/app/api/library/*` routes or the `src/lib/library-store/*` layer
+ - When changing the workflow Skill node's `libraryRef` data or the SkillPickerDialog
+ - When updating the Documents panel UI or the per-document Y.js collab binding
+- docs/tasks/feature-documents-skill-library-60d267bf/doc-feature-documents-skill-library-60d267bf.md
+ - Conditions:
+ - When working with the Documents Skill Library MVP (packs, skills, documents, publish, fork/merge, validation, `.nexus` export/import)
+ - When modifying `src/app/api/library/*` routes or the `src/lib/library-store/*` layer
+ - When changing the workflow Skill node's `libraryRef` data or the `SkillPickerDialog`
+ - When updating the Documents panel UI or the per-document Y.js collab binding for library docs
diff --git a/docs/tasks/documents-skill-library/doc-documents-skill-library.md b/docs/tasks/documents-skill-library/doc-documents-skill-library.md
new file mode 100644
index 0000000..f4f7427
--- /dev/null
+++ b/docs/tasks/documents-skill-library/doc-documents-skill-library.md
@@ -0,0 +1,133 @@
+# Documents Skill Library
+
+## What was built
+
+A new layer on top of the existing Brain-style filesystem document store that
+manages **versioned packs of Markdown skills** with workspace and user-local
+scopes, branch/fork flows, three-way Markdown merge, publish at pack and skill
+granularity, immutable pack-version snapshots, a self-contained `.nexus`
+workflow export, and best-effort Agent Skills compatibility.
+
+## Where things live
+
+### Server / storage layer
+- `src/lib/library-store/config.ts` — reads `NEXUS_LIBRARY_DATA_DIR` (default
+ `./.nexus-library`); reuses `NEXUS_BRAIN_TOKEN_SECRET` for auth parity.
+- `src/lib/library-store/types.ts` — record types
+ (`LibraryRecord`, `PackRecord`, `SkillRecord`, `LibraryDocumentRecord`,
+ `LibraryDocumentVersionRecord`, `PackVersionRecord`, `SkillVersionRecord`,
+ `BranchRecord`, `MergeRecord`, `ConflictRecord`, `LibraryManifest`,
+ `SkillBundle`, `ValidationWarning`).
+- `src/lib/library-store/object-store.ts` — `ObjectStorage` interface +
+ `FilesystemObjectStorage` driver. Keys follow the spec layout:
+ `documents/{docId}/versions/{versionId}/content.md`,
+ `documents/{docId}/versions/{versionId}/metadata.json`,
+ `packs/{packId}/versions/{versionId}/manifest.json`,
+ `exports/{exportId}/workflow-export.nexus`.
+- `src/lib/library-store/store.ts` — `LibraryStore` singleton with
+ `ensureLibraries`, `createPack`, `forkPack`, `softDeletePack`, `restorePack`,
+ `renamePack`, `searchPacks`, `createDocument`,
+ `saveDocumentVersion` (optimistic concurrency via `previousVersionId`),
+ `softDeleteDocument`, `createSkill`, `softDeleteSkill`, `publishPackVersion`,
+ `publishSkillVersion`, `mergeBaseIntoBranch`, `resolveMergeConflict`,
+ `resolveLive`, `compareDraftToPublished`, etc.
+- `src/lib/library-store/manifest.ts` — `buildManifest()` emits the normalized
+ `ManifestSchemaV1` shape.
+- `src/lib/library-store/merge.ts` — `threeWayTextMerge()` (no extra deps).
+- `src/lib/library-store/validation.ts` — `validatePack()`,
+ `parseFrontmatter()`, `parseSkillFrontmatter()`.
+- `src/lib/library-store/hashing.ts` — `sha256`, `computeContentHash`,
+ `buildHashManifest`.
+- `src/lib/library-store/resolver.ts` — `resolveLive` and
+ `resolveFromArtifact` (for read-only resolution from a `.nexus` archive).
+- `src/lib/library-store/export.ts` — `buildNexusArchive(workflowJson, …)`
+ walks the workflow for `libraryRef` references, snapshots each reachable pack
+ + skill + reference document into a JSZip archive, writes `manifest.json`,
+ `workflow.json`, `runtime/resolver-metadata.json`, and `hashes.json`.
+- `src/lib/library-store/import.ts` — `importNexusArchive(buffer)` (verifies
+ every file's SHA-256 against `hashes.json` before importing) and
+ `importAgentSkillsFolder(buffer)` (best-effort).
+- `src/lib/library-store/brain-migration.ts` — `migrateBrainDocsToUserLibrary`
+ helper that imports existing Brain docs into a new library pack.
+- `src/lib/library-store/schemas.ts` — Zod-v4 schemas for manifest, frontmatter,
+ and every API payload.
+
+### API routes
+Every route lives under `src/app/api/library/**/route.ts` and follows the
+existing Brain-route pattern (token auth via `requireWorkspace`,
+JSON in/out). Endpoints:
+- `POST /api/library/session` — bootstrap or resume a library session
+- `GET|POST /api/library/packs` — list / create
+- `GET|PATCH|DELETE /api/library/packs/[packId]` — get / rename / soft-delete
+- `POST /api/library/packs/[packId]/fork`
+- `POST /api/library/packs/[packId]/merge-base`
+- `GET|POST /api/library/packs/[packId]/versions`
+- `GET /api/library/packs/[packId]/versions/[versionId]`
+- `GET|POST /api/library/packs/[packId]/documents`
+- `GET|PATCH|DELETE /api/library/packs/[packId]/documents/[docId]`
+- `GET|POST /api/library/packs/[packId]/documents/[docId]/versions`
+- `GET /api/library/packs/[packId]/documents/[docId]/versions/[versionId]/content`
+- `GET|POST /api/library/packs/[packId]/skills`
+- `GET|PATCH|DELETE /api/library/packs/[packId]/skills/[skillId]`
+- `GET|POST /api/library/packs/[packId]/skills/[skillId]/versions`
+- `GET|POST /api/library/packs/[packId]/merges/[mergeId]/resolve`
+- `POST /api/library/resolve`
+- `POST /api/library/import` (multipart form: file, format, scope)
+- `POST /api/library/export` — streams a `.nexus` zip back
+
+### Client
+- `src/lib/library-client.ts` — typed fetch wrappers around the API routes.
+- `src/store/library-docs/store.ts` — Zustand store (`useLibraryDocsStore`)
+ exposing async actions matching the API surface plus pending-merge bookkeeping.
+
+### Collaboration
+- `src/lib/collaboration/lib-doc-collab.ts` — `openLibraryDocRoom()` opens a
+ Hocuspocus room per library document with room name
+ `lib:{workspaceId}:{scope}:{packId}:{docId}` and binds a single `Y.Text`
+ (`content`).
+
+### UI
+- `src/components/workflow/documents-panel/` — entire panel:
+ `panel.tsx`, `pack-browser.tsx`, `pack-detail.tsx`, `file-tree.tsx`,
+ `doc-editor.tsx`, `markdown-preview.tsx`, `skill-detail-panel.tsx`,
+ `publish-panel.tsx`, `branch-status-panel.tsx`,
+ `conflict-resolve-dialog.tsx`, `import-dialog.tsx`, plus
+ `use-documents-panel-controller.ts`, `constants.ts`, `types.ts`, `index.ts`.
+- `src/components/workflow/properties/skill-picker-dialog.tsx` — Skill picker
+ dialog + `LibraryRefSection` used by the Skill node properties form.
+- `src/components/workflow/header/session-actions.tsx` — adds a **Library**
+ button that dispatches `nexus:toggle-documents-panel`.
+- `src/components/workflow/workflow-editor.tsx` — mounts `DocumentsPanel` and
+ listens for the toggle event.
+- `src/components/workflow/generated-export-dialog.tsx` — adds a **Download
+ .nexus archive** action alongside the ZIP and folder export actions.
+
+### Skill node updates
+- `src/types/workflow.ts` and `src/nodes/skill/types.ts` add a
+ `libraryRef: { scope, packId, packKey?, packVersion, skillId, skillKey?, skillName? } | null`
+ field.
+- `src/nodes/skill/constants.ts` updates the default data and Zod schema to
+ include `libraryRef`.
+- `src/nodes/skill/fields.tsx` adds a Library Reference section above the
+ inline fields.
+- `src/nodes/skill/generator.ts` accepts an optional `resolvedBundle` and emits
+ the bundle's entrypoint content when a `libraryRef` is set.
+- `src/nodes/skill/node.tsx` shows a pack badge when `libraryRef` is present.
+
+### Tests
+- `src/lib/__tests__/library-store.test.ts` — full store test (AC-1..AC-8 spine).
+- `src/lib/__tests__/library-merge.test.ts` — three-way merge edge cases.
+- `src/lib/__tests__/library-validation.test.ts` — every Validation Requirements rule.
+- `src/lib/__tests__/library-export.test.ts` — `.nexus` archive contents,
+ hash round-trip, artifact-only resolution (AC-10, AC-11).
+- `src/lib/__tests__/library-import.test.ts` — round-trip + hash mismatch.
+- `src/lib/__tests__/library-resolver.test.ts` — draft vs pinned semantics.
+- `src/store/__tests__/library-docs.test.ts` — store smoke test.
+- `src/nodes/skill/__tests__/generator.test.ts` — inline + library-ref paths.
+
+### Configuration
+- `.env.example` — adds `NEXUS_LIBRARY_DATA_DIR`.
+- `.gitignore` — ignores `.nexus-library/`.
+- `scripts/start.sh` — provisions the library data dir alongside Brain/collab.
+- `docker-compose.yml` — adds `nexus_library_data` volume mounted at
+ `/data/library` with `NEXUS_LIBRARY_DATA_DIR` env.
diff --git a/docs/tasks/feature-documents-skill-library-60d267bf/assets/01_main_editor.png b/docs/tasks/feature-documents-skill-library-60d267bf/assets/01_main_editor.png
new file mode 100644
index 0000000..3ee02d7
Binary files /dev/null and b/docs/tasks/feature-documents-skill-library-60d267bf/assets/01_main_editor.png differ
diff --git a/docs/tasks/feature-documents-skill-library-60d267bf/assets/02_documents_panel_new_pack.png b/docs/tasks/feature-documents-skill-library-60d267bf/assets/02_documents_panel_new_pack.png
new file mode 100644
index 0000000..e5572f5
Binary files /dev/null and b/docs/tasks/feature-documents-skill-library-60d267bf/assets/02_documents_panel_new_pack.png differ
diff --git a/docs/tasks/feature-documents-skill-library-60d267bf/assets/03_canvas_with_skill_node.png b/docs/tasks/feature-documents-skill-library-60d267bf/assets/03_canvas_with_skill_node.png
new file mode 100644
index 0000000..db69805
Binary files /dev/null and b/docs/tasks/feature-documents-skill-library-60d267bf/assets/03_canvas_with_skill_node.png differ
diff --git a/docs/tasks/feature-documents-skill-library-60d267bf/assets/04_skill_node_properties.png b/docs/tasks/feature-documents-skill-library-60d267bf/assets/04_skill_node_properties.png
new file mode 100644
index 0000000..f2c0113
Binary files /dev/null and b/docs/tasks/feature-documents-skill-library-60d267bf/assets/04_skill_node_properties.png differ
diff --git a/docs/tasks/feature-documents-skill-library-60d267bf/assets/05_skill_picker_dialog.png b/docs/tasks/feature-documents-skill-library-60d267bf/assets/05_skill_picker_dialog.png
new file mode 100644
index 0000000..8532493
Binary files /dev/null and b/docs/tasks/feature-documents-skill-library-60d267bf/assets/05_skill_picker_dialog.png differ
diff --git a/docs/tasks/feature-documents-skill-library-60d267bf/doc-feature-documents-skill-library-60d267bf.md b/docs/tasks/feature-documents-skill-library-60d267bf/doc-feature-documents-skill-library-60d267bf.md
new file mode 100644
index 0000000..f979cfa
--- /dev/null
+++ b/docs/tasks/feature-documents-skill-library-60d267bf/doc-feature-documents-skill-library-60d267bf.md
@@ -0,0 +1,184 @@
+# Documents Skill Library
+
+**ADW ID:** 60d267bf
+**Date:** 2026-04-25
+**Plan:** docs/tasks/feature-documents-skill-library-60d267bf/plan-feature-documents-skill-library-60d267bf.md
+
+## Overview
+
+Adds a versioned, sharable home for Markdown skills with workspace and user-local
+scopes, branchable packs, real-time collaborative editing, publish flows, and a
+self-contained `.nexus` archive export. Workflow Skill nodes can now reference a
+library skill by `scope + packId + packVersion + skillId`, and exported workflows
+bundle every reachable pack so they resolve offline.
+
+## Screenshots
+
+![Main editor](assets/01_main_editor.png)
+
+![Documents panel — new pack](assets/02_documents_panel_new_pack.png)
+
+![Canvas with Skill node](assets/03_canvas_with_skill_node.png)
+
+![Skill node properties](assets/04_skill_node_properties.png)
+
+![Skill picker dialog](assets/05_skill_picker_dialog.png)
+
+## What Was Built
+
+- Filesystem-backed library metadata store with RustFS-shaped object keys
+ (`src/lib/library-store/`)
+- API surface under `src/app/api/library/**` covering session, packs, documents,
+ versions, skills, fork, merge-base, conflict resolve, publish, resolve, import,
+ and export
+- Three-way Markdown merge with conflict records and an inline resolver dialog
+- Pack/skill/document validation (entrypoint, frontmatter, duplicate ids,
+ broken references, manifest mismatches, deleted-but-referenced docs)
+- Documents panel UI: scope tabs, pack browser, four-column pack detail with
+ file tree, Markdown editor, preview, skill detail/validation, and publish/branch
+ panels
+- Per-document Y.js collab binding reusing the Hocuspocus server (room name
+ `lib:{workspaceId}:{scope}:{packId}:{docId}`)
+- Skill node `libraryRef` data field plus a `SkillPickerDialog` for linking a
+ workflow Skill node to a library skill at a specific version (or `draft`)
+- Skill generator routes content through the resolved library bundle when a ref
+ is set, falling back to inline behavior otherwise
+- `.nexus` archive export with `manifest.json`, `workflow.json`, snapshotted pack
+ contents, `runtime/resolver-metadata.json`, and `hashes.json`
+- Import for Nexus-native archives and best-effort Agent Skills folders/zips
+- Test coverage for storage, merge, validation, export, import, resolver, and
+ the library Zustand store
+
+## Technical Implementation
+
+### Files Modified
+
+- `src/types/workflow.ts`: extends node-data union for the new `libraryRef`
+- `src/nodes/skill/types.ts`: adds `SkillLibraryRef` interface to `SkillNodeData`
+- `src/nodes/skill/constants.ts`: defaults `libraryRef: null`; Zod schema entry
+- `src/nodes/skill/generator.ts`: accepts a resolved `SkillBundle` and emits
+ pack-derived `SKILL.md` content when a library ref is set
+- `src/nodes/skill/fields.tsx`, `src/nodes/skill/node.tsx`: render library
+ reference badge + "Link to library skill" entry point
+- `src/components/workflow/properties/skill-picker-dialog.tsx`: new dialog for
+ picking scope/pack/skill/version
+- `src/components/workflow/generated-export-dialog.tsx`: adds the `.nexus`
+ archive option to the export flow
+- `src/components/workflow/header/session-actions.tsx`,
+ `src/components/workflow/workflow-editor.tsx`: surfaces the Documents panel
+ toggle in the header
+- `src/lib/collaboration/collab-doc.ts`: small adjustment compatible with the
+ new per-document collab binding
+- `.env.example`, `.gitignore`, `docker-compose.yml`, `next.config.ts`,
+ `scripts/start.sh`, `README.md`: env, ignore, deploy, and docs wiring for
+ `NEXUS_LIBRARY_DATA_DIR`
+
+### New Files (selected)
+
+- `src/lib/library-store/store.ts`: `LibraryStore` class — packs, skills,
+ documents, versions, branches, merges, publish, resolve. Singleton via
+ `getLibraryStore()` mirroring `BrainStore`
+- `src/lib/library-store/object-store.ts`: `ObjectStorage` interface +
+ filesystem driver with immutable version keys
+- `src/lib/library-store/merge.ts`: line-based diff3 with structured conflicts
+- `src/lib/library-store/manifest.ts`, `validation.ts`: normalized manifest +
+ full validation rule set
+- `src/lib/library-store/resolver.ts`: live and artifact-mode skill resolution
+- `src/lib/library-store/export.ts`, `import.ts`: `.nexus` archive build /
+ read with hash verification
+- `src/lib/library-store/schemas.ts`: Zod-v4 schemas for manifest, frontmatter,
+ and every API payload
+- `src/lib/library-client.ts`: typed fetch wrapper using the Brain token
+- `src/lib/collaboration/lib-doc-collab.ts`: per-document Y.Text room opener
+- `src/store/library-docs/`: Zustand slice for packs/skills/documents and
+ pending merges
+- `src/types/library.ts`: shared types (`LibraryScope`, `PackRef`, `SkillRef`,
+ `SkillBundle`, `MergeState`, `ValidationWarning`)
+- `src/components/workflow/documents-panel/`: panel, pack-browser, pack-detail,
+ file-tree, doc-editor, markdown-preview, skill-detail-panel, publish-panel,
+ branch-status-panel, conflict-resolve-dialog, import-dialog, controller hook
+- `src/app/api/library/**/route.ts`: 18 routes covering session, packs, fork,
+ merge-base, documents, versions, skills, merges, resolve, import, export
+
+### Key Changes
+
+- Library metadata persists in a single `manifest.json` plus per-version files
+ under `NEXUS_LIBRARY_DATA_DIR` (default `./.nexus-library`); the layout matches
+ S3/RustFS keys so a future driver swap is single-file
+- Document version writes use optimistic concurrency on `previousVersionId`;
+ stale heads are rejected with `StaleVersionError`
+- Publishing a pack snapshots every current document head into
+ `pack_version_documents` and writes a normalized manifest at
+ `packs/{packId}/versions/{versionId}/manifest.json`; published versions are
+ immutable
+- Skill node generation calls `resolveLive()` (or reads from the artifact at
+ export time) and feeds a `SkillBundle` into `generator.getSkillFile()`. With
+ no `libraryRef`, the inline path is preserved for back-compat
+- Forking a workspace pack creates a user-local copy with `base_version_id` set
+ per document; "Merge latest base" runs three-way merge, producing clean
+ versions or `document_merges` + `document_conflicts` for conflicting edits
+- The `.nexus` export traverses every Skill node's `libraryRef`, gathers the
+ pack manifest + closure of referenced docs/rules/assets, and writes
+ SHA-256 hashes for every file alongside resolver metadata so an importer can
+ resolve skills without the live store
+
+## How to Use
+
+1. Open the editor and click **Library** in the header toolbar to open the
+ Documents Skill Library panel.
+2. In the **Workspace** tab, click **+ New** and create a pack (e.g.
+ `customer-support`).
+3. Inside the pack, create a skill — this generates a `SKILL.md` entrypoint.
+ Edit the Markdown in the doc editor; saves create new immutable versions.
+4. Add supporting documents under appropriate roles (references, docs, rules,
+ templates, examples, assets).
+5. Click **Publish pack version**, enter a semver string, and confirm — the
+ panel will block publish if validation warnings exist.
+6. To consume a skill in a workflow: drop a **Skill** node, open its properties,
+ click **Link to library skill**, choose scope → pack → skill → version, and
+ confirm. The node displays a pack badge and the generator pulls content from
+ the pack on export.
+7. To share a workflow self-contained: open **Generate / Export** → choose
+ **Nexus archive** → download. Re-importing the `.nexus` file reproduces the
+ linked packs and skills byte-for-byte.
+8. To branch: open a workspace pack, click **Fork to user-local**. Edit your
+ fork independently; when the workspace base advances, click **Merge latest
+ base** in the branch status panel to pull updates (conflicts open the
+ resolver dialog).
+
+## Configuration
+
+- `NEXUS_LIBRARY_DATA_DIR` — directory for the library manifest and version
+ objects (default `./.nexus-library`)
+- `NEXUS_BRAIN_TOKEN_SECRET` — reused for library session HMAC tokens so the
+ library shares the Brain workspace identity
+- `.nexus-library/` is gitignored
+- `docker-compose.yml` mounts a `nexus_library_data` volume at `/data/library`, mirroring the Brain dir
+
+## Testing
+
+- `bun run test:lib` — covers `library-store`, `library-merge`,
+ `library-validation`, `library-export`, `library-import`, `library-resolver`
+- `bun run test:store` — covers the `library-docs` Zustand slice
+- `bun run test:nodes` — Skill node generator (with and without `libraryRef`)
+- `bun run typecheck` and `bun run lint` for type/lint regressions
+- `bun run build` for export/route/wiring regressions
+- Manual smoke (also captured in
+ `e2e-feature-documents-skill-library-60d267bf.md`): create a pack, add a
+ skill, save, publish `1.0.0`, fork to user-local, edit and republish base
+ as `1.1.0`, merge into the fork, link the skill in a workflow Skill node,
+ export `.nexus`, re-import, confirm the skill resolves with the same content
+
+## Notes
+
+- The codebase has no relational database; the same semantic schema is held in
+ a JSON manifest plus per-record files. Swapping in Postgres/SpacetimeDB later
+ is a storage-driver change.
+- `.nexus` is provisional — exposed via a single helper for easy renaming.
+- Scripts inside packs are stored as documents only; nothing in this feature
+ executes user-supplied content.
+- Existing Brain documents stay under `/api/brain`; the library is a parallel
+ system. A `brain-migration.ts` helper exists for a one-click "import Brain
+ docs into user library" flow.
+- Workflows that pre-date this feature continue to work — Skill nodes default
+ `libraryRef: null` and use the existing inline content path.
diff --git a/docs/tasks/feature-documents-skill-library-60d267bf/e2e-feature-documents-skill-library-60d267bf.md b/docs/tasks/feature-documents-skill-library-60d267bf/e2e-feature-documents-skill-library-60d267bf.md
new file mode 100644
index 0000000..ec6baa0
--- /dev/null
+++ b/docs/tasks/feature-documents-skill-library-60d267bf/e2e-feature-documents-skill-library-60d267bf.md
@@ -0,0 +1,53 @@
+# E2E: Documents Skill Library
+
+## User Story
+
+As a workspace user, I can create a pack with two skills, fork it into my user-local
+library, edit a skill in real-time with a collaborator, publish a pack version,
+reference that skill in a workflow, and export a self-contained `.nexus` archive
+that resolves skill content offline.
+
+## Test Steps (playwright-cli)
+
+1. Open app at `http://localhost:3000` (screenshot: `01-app-initial.png`).
+2. Open Documents panel from header toolbar by clicking the **Library** button (screenshot: `02-empty-workspace-library.png`).
+3. Click **+ New** in the workspace pack list, enter `customer-support` as the pack key and `Customer Support` as the name, click **Create** (screenshot: `03-pack-detail-empty.png`).
+4. In the file tree, click **+ add** under `SKILL.md` and create `support-triage/SKILL.md` (screenshot: `04-skill-doc-created.png`).
+5. Select the new SKILL.md, edit it to contain:
+ ```markdown
+ ---
+ name: support-triage
+ description: Classifies support requests.
+ ---
+ # Support Triage
+
+ Initial instructions.
+ ```
+ Click **Save snapshot** (screenshot: `05-version-history.png`).
+6. Add `references/escalation-policy.md` via the file tree's References section (screenshot: `06-file-tree-two-docs.png`).
+7. Click **+ New skill**, fill in `support-triage` for the key, `Support Triage` for the name, `Classifies support requests.` for description, select the SKILL.md as the entrypoint, click **Create skill**.
+8. In the right column, enter `1.0.0` as the version and click **Publish version** (screenshot: `07-publish-success.png`).
+9. Hover over the pack in the Workspace tab and click the **fork** icon to fork into the user-local library. Switch to the **User-local** tab — the pack appears with a "forked" badge (screenshot: `08-fork-badge.png`).
+10. Switch back to the **Workspace** tab, select `customer-support`, edit `SKILL.md` to append `\n\nAdded v1.1 guidance.`, click **Save snapshot**, then publish version `1.1.0`.
+11. Switch to the **User-local** tab, select the forked pack, click **Merge latest base** in the branch status panel — expect a clean merge toast and the appended text in the SKILL.md preview (screenshot: `09-merge-clean.png`).
+12. Open a workflow, place a Skill node, in its properties open **Library Reference → Link to library skill**. Select `workspace / customer-support / support-triage @ 1.1.0` in the picker. The Skill node now displays a pack badge (screenshot: `10-skill-node-badge.png`).
+13. Open **Generate / Export** dialog, click **Download .nexus archive**, capture the download (screenshot: `11-nexus-archive-download.png`).
+14. Open the **Import** dialog in a fresh workspace (or via clearing the data dir), upload the `.nexus`, and confirm the skill resolves with the saved content (screenshot: `12-resolve-after-import.png`).
+
+## Success Criteria
+
+- Pack `customer-support` and skill `support-triage` appear with the exact names above.
+- Published versions list contains both `1.0.0` and `1.1.0`.
+- Forked pack shows `forked` and `behind base` before merge, becomes in-sync after merge.
+- Workflow Skill node displays the pack badge `workspace/customer-support@1.1.0`.
+- Exported `.nexus` archive contains:
+ - `manifest.json`
+ - `workflow.json`
+ - `libraries/workspace/packs/customer-support/skills/support-triage/SKILL.md`
+ - `hashes.json`
+ - `runtime/resolver-metadata.json`
+- Re-importing the archive reproduces the SKILL.md content byte-for-byte (every entry in `hashes.json` matches the imported file's SHA-256).
+
+## Screenshot Capture Points
+
+Capture screenshots at each numbered step above. Save under `screenshots/feature-documents-skill-library-60d267bf/`.
diff --git a/docs/tasks/feature-documents-skill-library-60d267bf/plan-feature-documents-skill-library-60d267bf.md b/docs/tasks/feature-documents-skill-library-60d267bf/plan-feature-documents-skill-library-60d267bf.md
new file mode 100644
index 0000000..fc757ee
--- /dev/null
+++ b/docs/tasks/feature-documents-skill-library-60d267bf/plan-feature-documents-skill-library-60d267bf.md
@@ -0,0 +1,536 @@
+# feature: documents-skill-library
+
+## Metadata
+adw_id: `60d267bf`
+issue_description: `docs/spec/spec-documents-skill-library.md` — Documents Skill Library (workspace + user-local library, packs/plugins, skills, real-time Markdown collaboration, branch/merge, publish, self-contained workflow export). See `agents/60d267bf/start-task.json`.
+
+## Description
+
+Nexus Workflow Studio currently has:
+
+- a Brain document store (filesystem, signed token, version snapshots, soft-delete) at `src/lib/brain/server.ts` + `src/app/api/brain/*`
+- a local library (browser localStorage) storing saved workflows and reusable nodes at `src/lib/library.ts`
+- node types `Skill`, `Document`, `Prompt`, `Script` with per-node generators that emit `SKILL.md`, docs, scripts under `.opencode|.pi|.claude`
+- Hocuspocus-backed real-time collaboration (`src/lib/collaboration/collab-doc.ts`) syncing both workflow canvas and Brain docs via Y.js
+- a Markdown editor via `@uiw/react-md-editor` used in the Brain panel
+- no database (Postgres/SQLite/Drizzle are absent from `package.json`)
+
+The spec asks for a document-centered **library of Markdown skills** grouped into **packs/plugins**, with **workspace** and **user-local** scopes, **branch/fork** flows, **publish at pack and skill level**, **self-contained workflow export**, and **Agent Skills compatibility**. Workflows must reference skills by stable `scope + packId + packVersion + skillId`.
+
+Because this codebase has **no relational database**, we implement metadata in the same filesystem-backed pattern used by Brain: a JSON manifest + per-record files. RustFS-style immutable object-key conventions are followed inside the filesystem so a future swap to S3/RustFS is a storage-driver change. "Workspace" here aligns with the existing Brain workspace (one per signed token).
+
+## Objective
+
+Deliver the MVP slice of the Documents Skill Library spec:
+
+- workspace + user-local library packs
+- file tree + Markdown document editor (real-time collab reusing Hocuspocus)
+- skill folders with `SKILL.md` entrypoints
+- normalized pack manifest
+- validation (missing entrypoint, duplicate IDs, broken references, invalid frontmatter)
+- branch/fork of a workspace pack into a user-local fork with three-way Markdown merge + conflict records
+- publish pack version and publish skill version
+- immutable pack-version snapshots bound to immutable document versions
+- workflow Skill node references `scope + packId + packVersion + skillId` with a skill picker
+- self-contained `.nexus` workflow export (zip) containing workflow definition, referenced packs/skills/documents, normalized manifests, and content hashes
+- import of Nexus-native `.nexus` archives and best-effort Agent Skills folders/zips
+- tests for storage, manifest building, three-way merge, publish, export/import integrity
+- E2E coverage of the golden path
+
+## Problem Statement
+
+The editor has no way to author, version, organize, publish, or export a reusable **library of Markdown skills** that multiple workflows can reference by stable identity. Users currently duplicate skill content in every workflow, lose customizations when a base skill is updated, cannot share packs of related skills, and cannot produce a workflow export that is self-contained (the generated `SKILL.md` files live next to each workflow rather than inside a versioned, sharable pack).
+
+## Solution Statement
+
+Layer a **pack/skill metadata service** and a **library UI** on top of the existing Brain-style filesystem document store. Extend `src/lib/brain` into `src/lib/library-store` (backend) and a matching client. Use Hocuspocus + Y.js for live Markdown editing (one Y.Doc per library document, room name `lib:{workspaceId}:{libraryDocId}`). Add publish flows that snapshot documents into immutable version records. Update the Skill node data model to carry `scope + packId + packVersion + skillId` and resolve content through a runtime resolver that works both live and inside an exported artifact. Add a `.nexus` zip export pipeline that bundles the workflow + reachable packs.
+
+## Code Patterns to Follow
+
+Reference implementations in this repo:
+
+- **Brain filesystem store** (`src/lib/brain/server.ts`) — manifest.json + per-doc + per-version files, HMAC-signed tokens, soft-delete via `deletedAt`, `createVersion()` pattern, singleton accessor. Model the new library store after this.
+- **Brain API routes** (`src/app/api/brain/session/route.ts`, `src/app/api/brain/documents/**`) — token auth via `requireWorkspace()` / `getBrainTokenFromHeaders()`, JSON responses, `[id]` and nested dynamic segments. Mirror this shape under `src/app/api/library/**`.
+- **Knowledge store** (`src/store/knowledge/store.ts`) — async Zustand slice that wraps the Brain API; replicate for library state.
+- **Collab wiring** (`src/lib/collaboration/collab-doc.ts`) — Y.Doc + Hocuspocus provider, awareness for presence, subscribe-observe dedupe pattern. Reuse for per-document Markdown editing: add a second `CollabDoc`-style binding for `Y.Text` per library doc.
+- **Node generator module** (`src/nodes/skill/generator.ts`, `src/lib/workflow-generator.ts`) — `NodeGeneratorModule.getSkillFile()` signature and `GeneratedFile[]` aggregation. Extend to route skill content through the library resolver.
+- **Export paths** (`src/lib/generation-targets.ts`, `buildGeneratedSkillFilePath()`) — keep sanitization helpers; add a new target for `.nexus` archives.
+- **Marketplace** (`src/lib/marketplace/index.ts`) — plugin discovery pattern; reuse `.claude-plugin/marketplace.json` parsing when importing Agent Skills folders.
+- **Workflow JSON validation** (`src/lib/workflow-validation.ts`, `src/lib/workflow-schema.ts`) — Zod-v4 schema style (note: import `"zod/v4"` per project rule).
+- **Library panel UI** (`src/components/workflow/library-panel/panel.tsx` + `use-library-panel-controller.ts`) — controller/view split with shadcn primitives.
+
+## Relevant Files
+
+Use these files to complete the task:
+
+### Spec & guidance
+- `/media/falfaddaghi/extradrive2/repo/NexusWorkflowStudio/trees/rustFS/docs/spec/spec-documents-skill-library.md` — full spec (the authoritative source).
+- `/media/falfaddaghi/extradrive2/repo/NexusWorkflowStudio/trees/rustFS/rustfs-branchable-document-system.md` — branchable document substrate this feature layers on.
+- `CLAUDE.md` — project rules: `@/*` alias, `zod/v4`, dark-theme, client-only storage caveats, update multiple touchpoints when changing nodes.
+- `docs/tasks/persistent-brain/doc-persistent-brain.md` — prior patterns for filesystem doc store, signed tokens, version snapshots, share links. (Read because this task persists documents and extends `src/lib/brain/*`.)
+- `docs/tasks/conditional_docs.md` — confirms the above doc is relevant.
+
+### Server / storage layer (to extend)
+- `src/lib/brain/server.ts` — `BrainStore`, `createVersion()`, token helpers (`requireWorkspace`, `getBrainTokenFromHeaders`, `createShareToken`). Import helpers and follow pattern.
+- `src/lib/brain/types.ts`, `src/lib/brain/client.ts`, `src/lib/brain/config.ts`, `src/lib/brain/schemas.ts` — reference types and schemas.
+- `src/lib/brain/__tests__/*.test.ts` — test patterns for filesystem stores.
+- `src/app/api/brain/**/route.ts` — route shape (token → workspaceId → action).
+
+### Node system (to modify)
+- `src/types/node-types.ts` — node-type enum, library-saveable set.
+- `src/types/workflow.ts` — node-data union (will extend `SkillNodeData`).
+- `src/lib/node-registry.ts` — central registry.
+- `src/nodes/skill/types.ts`, `src/nodes/skill/constants.ts`, `src/nodes/skill/fields.tsx`, `src/nodes/skill/generator.ts`, `src/nodes/skill/node.tsx`, `src/nodes/skill/index.ts`, `src/nodes/skill/script-utils.ts`.
+- `src/nodes/document/types.ts`, `src/nodes/document/fields.tsx`, `src/nodes/document/generator.ts`, `src/nodes/document/utils.ts`.
+- `src/nodes/prompt/types.ts` (already has `brainDocId`; model for pack reference).
+- `src/components/nodes/skill-node.tsx`, `src/components/nodes/document-node.tsx` — renderers.
+- `src/components/workflow/properties/type-specific-fields.tsx`, `.../skill-fields.tsx`, `.../document-fields.tsx`.
+- `src/nodes/shared/registry-types.ts`.
+
+### Generation / export (to modify)
+- `src/lib/workflow-generator.ts` — add pack/skill resolution path.
+- `src/lib/generation-targets.ts` — add `nexus` archive target + helpers.
+- `src/lib/generated-workflow-export.ts` — folder export integration.
+- `src/lib/persistence.ts` — `getWorkflowExportContent()` / `getWorkflowExportFileName()`.
+- `src/lib/workflow-generation/shared.ts`, `src/lib/workflow-generation/detail-sections.ts` — shared utilities.
+- `src/lib/run-script-generator.ts` — run-script emission.
+
+### Store layer (to extend)
+- `src/store/knowledge/store.ts` — async Zustand pattern.
+- `src/store/workflow/store.ts`, `src/store/workflow/index.ts` — canvas store.
+- `src/store/library/store.ts`, `src/store/library-store.ts` — library items (local).
+- `src/store/collaboration/collab-store.ts`, `src/store/collaboration/awareness-store.ts`.
+
+### Collaboration (to extend)
+- `src/lib/collaboration/collab-doc.ts` — Y.Doc singleton and Hocuspocus wiring.
+- `src/lib/collaboration/object-store.ts` — per-room persistence.
+- `src/lib/collaboration/config.ts`, `src/lib/collaboration/awareness-names.ts`.
+- `scripts/collab-server.ts` — Hocuspocus server.
+
+### UI (to extend)
+- `src/components/workflow/library-panel/panel.tsx`, `.../cards.tsx`, `.../constants.ts`, `.../types.ts`, `.../previews.tsx`, `.../use-library-panel-controller.ts`.
+- `src/components/workflow/brain-panel/panel.tsx`, `.../doc-editor.tsx` — Markdown editor + version restore UI to mirror.
+- `src/components/workflow/properties-panel.tsx` — properties host.
+- `src/components/workflow/header.tsx`, `src/components/workflow/workflow-editor.tsx` — top-level wiring.
+- `src/components/workflow/generated-export-dialog.tsx`, `src/components/workflow/import-dialog.tsx` — export/import UI to reuse.
+- `src/components/ui/*` — shadcn primitives; do **not** hand-edit.
+
+### Tests
+- `src/lib/__tests__/brain-server.test.ts`, `src/lib/__tests__/library.test.ts`, `src/lib/__tests__/collaboration-object-store.test.ts`, `src/lib/__tests__/generation-targets.test.ts`, `src/lib/__tests__/run-script-generator.test.ts`, `src/lib/__tests__/workflow-connections.test.ts`, `src/lib/__tests__/subworkflow-transfer.test.ts`.
+- `src/store/__tests__/*.test.ts`.
+- `src/nodes/document/__tests__/generator.test.ts`, `.../utils.test.ts`.
+
+### New Files
+
+**Server / storage layer:**
+- `src/lib/library-store/config.ts` — reads `NEXUS_LIBRARY_DATA_DIR` (default `./.nexus-library`), reuses `NEXUS_BRAIN_TOKEN_SECRET`.
+- `src/lib/library-store/types.ts` — `LibraryScope` (`"workspace" | "user"`), `LibraryRecord`, `PackRecord`, `SkillRecord`, `LibraryDocumentRecord`, `LibraryDocumentVersionRecord`, `PackVersionRecord`, `PackVersionDocumentRecord`, `SkillVersionRecord`, `MergeRecord`, `ConflictRecord`, `LibraryManifest`.
+- `src/lib/library-store/object-store.ts` — S3/RustFS-shaped key layout on the filesystem: `documents/{docId}/versions/{versionId}/content.md`, `documents/{docId}/versions/{versionId}/metadata.json`, `packs/{packId}/versions/{versionId}/manifest.json`, `exports/{exportId}/workflow-export.nexus`. Abstract `ObjectStorage` interface matching the spec.
+- `src/lib/library-store/store.ts` — `LibraryStore` class with `createLibrary`, `createPack`, `forkPack`, `renamePack`, `softDeletePack`, `listPacks`, `createDocument`, `saveDocumentVersion` (with optimistic concurrency via `previousVersionId`), `listDocuments`, `softDeleteDocument`, `renameDocument`, `moveDocument`, `createSkill`, `listSkills`, `mergeBaseIntoBranch`, `resolveMergeConflict`, `publishPackVersion`, `publishSkillVersion`, `resolveLiveSkill`, `listPackVersions`, `listSkillVersions`. Mirrors `BrainStore` patterns.
+- `src/lib/library-store/manifest.ts` — normalizes frontmatter+file-tree into manifest JSON (FR-38, FR-39, FR-41). Schema version `1`.
+- `src/lib/library-store/merge.ts` — three-way Markdown/plain-text merge (use `diff3` style; implement inline — no new dep). Returns `{ content, conflicts[] }` matching spec conflict record shape.
+- `src/lib/library-store/hashing.ts` — SHA-256 helpers; `computeContentHash`, `buildHashManifest` (for FR-65).
+- `src/lib/library-store/validation.ts` — validates missing entrypoint, missing description, duplicate IDs, invalid frontmatter, broken references (FR-36, validation requirements section). Returns typed warnings.
+- `src/lib/library-store/resolver.ts` — runtime resolver: `resolveLive(ref)` and `resolveFromArtifact(ref, artifact)` (see spec "Runtime Resolution").
+- `src/lib/library-store/import.ts` — Nexus-native archive import + best-effort Agent Skills folder/zip import (FR-70, FR-71, FR-72).
+- `src/lib/library-store/export.ts` — `.nexus` archive builder (JSZip). Gathers reachable packs, snapshots draft+published docs, writes `hashes.json`, `manifest.json`, `runtime/resolver-metadata.json`, `workflow.json`, `libraries/{scope}/packs/{packKey}/...`.
+- `src/lib/library-store/schemas.ts` — Zod-v4 schemas for manifest, frontmatter, and API payloads. Use `import { z } from "zod/v4"`.
+- `src/lib/library-store/index.ts` — public barrel.
+
+**API routes (Next.js App Router) under `src/app/api/library/`:**
+- `session/route.ts` — bootstrap/resume library session (reuses Brain token).
+- `packs/route.ts` — GET (list by scope) / POST (create).
+- `packs/[packId]/route.ts` — GET / PATCH (rename/move scope) / DELETE (soft-delete).
+- `packs/[packId]/fork/route.ts` — POST (fork workspace pack into user scope).
+- `packs/[packId]/merge-base/route.ts` — POST (merge base changes into fork).
+- `packs/[packId]/versions/route.ts` — GET (list) / POST (publish pack version).
+- `packs/[packId]/versions/[versionId]/route.ts` — GET (resolve immutable pack).
+- `packs/[packId]/documents/route.ts` — GET (list) / POST (create doc).
+- `packs/[packId]/documents/[docId]/route.ts` — GET / PATCH (rename/move/role) / DELETE (soft-delete).
+- `packs/[packId]/documents/[docId]/versions/route.ts` — GET (list) / POST (save version; optimistic concurrency).
+- `packs/[packId]/documents/[docId]/versions/[versionId]/content/route.ts` — GET (raw content).
+- `packs/[packId]/skills/route.ts` — GET / POST.
+- `packs/[packId]/skills/[skillId]/route.ts` — GET / PATCH / DELETE.
+- `packs/[packId]/skills/[skillId]/versions/route.ts` — GET (list) / POST (publish skill version).
+- `packs/[packId]/merges/[mergeId]/resolve/route.ts` — POST (submit resolution).
+- `resolve/route.ts` — POST `{ scope, packId, packVersion, skillId }` → resolved skill bundle (FR-54 Live Library Mode).
+- `import/route.ts` — POST multipart/form-data (accepts `.nexus` archive or Agent Skills zip).
+- `export/route.ts` — POST `{ workflowJson }` → `.nexus` archive stream.
+
+**Client / store:**
+- `src/lib/library-client.ts` — typed fetch wrapper using the Brain token.
+- `src/store/library-docs/store.ts` — Zustand slice for packs, skills, documents, current selection, pending merges.
+- `src/store/library-docs/index.ts` — barrel.
+
+**Collaboration:**
+- `src/lib/collaboration/lib-doc-collab.ts` — per-document `Y.Text` binding to the Markdown editor. Room name `lib:{workspaceId}:{scope}:{packId}:{docId}`. On save / publish / export the server-side `LibraryStore` writes a snapshot from the current Y.js text.
+
+**UI:**
+- `src/components/workflow/documents-panel/panel.tsx` — library home: scope tabs (workspace / user-local), pack grid + search.
+- `src/components/workflow/documents-panel/pack-browser.tsx` — pack list, fork, rename, archive, soft-delete, restore.
+- `src/components/workflow/documents-panel/pack-detail.tsx` — four-column layout: file tree | editor | preview | skill details/validation/publish.
+- `src/components/workflow/documents-panel/file-tree.tsx` — renders docs grouped by role (SKILL.md, references, docs, rules, templates, examples, assets).
+- `src/components/workflow/documents-panel/doc-editor.tsx` — Markdown editor with Y.Text binding, presence, branch/base/head status badge.
+- `src/components/workflow/documents-panel/markdown-preview.tsx` — rendered preview (reuse `@uiw/react-md-editor` preview component).
+- `src/components/workflow/documents-panel/skill-detail-panel.tsx` — resolved skill bundle preview + validation warnings.
+- `src/components/workflow/documents-panel/publish-panel.tsx` — publish pack / publish skill dialogs with diff against latest published.
+- `src/components/workflow/documents-panel/branch-status-panel.tsx` — fork/branch state: clean / behind / conflict.
+- `src/components/workflow/documents-panel/conflict-resolve-dialog.tsx` — per-conflict manual resolver.
+- `src/components/workflow/documents-panel/import-dialog.tsx` — upload `.nexus` / Agent Skills zip.
+- `src/components/workflow/documents-panel/use-documents-panel-controller.ts` — controller hook.
+- `src/components/workflow/documents-panel/constants.ts`, `types.ts`, `index.ts`.
+- `src/components/workflow/properties/skill-picker-dialog.tsx` — skill picker for the workflow Skill node (FR-49, FR-50).
+
+**Types:**
+- `src/types/library.ts` — shared types for `LibraryScope`, `PackRef`, `SkillRef`, `SkillBundle`, `MergeState`, `ValidationWarning`. Import from `@/types/library`.
+
+### E2E test file (task below describes it; do NOT write it):
+- `docs/tasks/feature-documents-skill-library-60d267bf/e2e-feature-documents-skill-library-60d267bf.md`
+
+## Implementation Plan
+
+### Phase 1: Foundation (data model + storage)
+
+Build the metadata store, object-storage abstraction, and Zod schemas **without** UI. Provide enough API to create a library, create a pack, create a skill + doc, save document versions, publish pack + skill versions, compute hashes, list versions. Add tests for every storage primitive.
+
+### Phase 2: Editor and collaboration
+
+Wire the documents panel, pack browser, file tree, Markdown editor, preview, and a per-document Y.Text collab binding that shares a Hocuspocus room. Show branch/head/base status. On save → snapshot document version through the store.
+
+### Phase 3: Skills and validation
+
+Add skill creation (SKILL.md entrypoint), normalized manifest generation, resolved skill preview, validation panel, skill picker dialog. Wire the Skill node to reference `scope + packId + packVersion + skillId`. Add live resolver that returns a `SkillBundle`.
+
+### Phase 4: Publishing and branch merges
+
+Publish pack version (snapshots every document version id into `pack_version_documents`). Publish skill version (snapshots skill doc closure). Fork a workspace pack into user-local (creates branch with `base_version_id` per document). Merge-base pulls latest pack-version base content into the fork and runs three-way merge per document; conflicts get `document_merges` records and a conflict-resolution UI.
+
+### Phase 5: Export and import
+
+Self-contained `.nexus` zip export including `manifest.json`, `workflow.json`, `libraries/{scope}/packs/...`, `runtime/resolver-metadata.json`, `hashes.json`. Integrate with existing Generate/Export flow as a new target alongside OpenCode / PI / Claude Code, plus a new archive option. Add import for `.nexus` archives and best-effort Agent Skills folders/zips.
+
+## Step by Step Tasks
+
+IMPORTANT: Execute every step in order, top to bottom.
+
+### 1. Create library-store storage foundation (Phase 1, FR-13, FR-14, FR-21, FR-44, FR-45, FR-65)
+
+- Create `src/lib/library-store/config.ts`. Read `NEXUS_LIBRARY_DATA_DIR` (default `./.nexus-library`) and reuse `NEXUS_BRAIN_TOKEN_SECRET` via `getBrainConfig()` so tokens interoperate.
+- Create `src/lib/library-store/object-store.ts`. Implement the `ObjectStorage` interface from the spec (`putObject`, `getObject`, `deleteObject`, `objectExists`) with a filesystem driver anchored at the data dir. Keys follow the spec: `documents/{docId}/versions/{versionId}/content.md`, `documents/{docId}/versions/{versionId}/metadata.json`, `packs/{packId}/versions/{versionId}/manifest.json`, `exports/{exportId}/workflow-export.nexus`. Keep keys immutable — no overwrite for version objects.
+- Create `src/lib/library-store/hashing.ts` with `sha256(content: string|Buffer)` (use `node:crypto`).
+- Create `src/lib/library-store/types.ts` with the record types listed above. Use `"workspace" | "user"` for `LibraryScope` and allow future extension. Every record includes `deletedAt: string | null`, `createdAt`, `updatedAt`, optional `metadata`.
+
+### 2. Create Zod schemas for manifest + API payloads (FR-37, FR-38, FR-39, FR-41)
+
+- Create `src/lib/library-store/schemas.ts`. `import { z } from "zod/v4"` — **not** `"zod"`.
+- Define `ManifestSchemaV1` matching the spec "Manifest Shape" section: `schemaVersion: 1`, `packId`, `name`, `description`, `version`, `skills` (map of `skillId → { name, description, entrypoint, documents[], rules[] }`), `docs`, `rules`, `assets`.
+- Define request/response schemas for every API route (create pack, create skill, save document version, publish pack, merge-base, resolve-conflict, resolve, import, export).
+- Define `SkillFrontmatterSchema` for parsing `SKILL.md` YAML frontmatter (`name`, `description`, optional `compatibility`, optional `metadata`).
+
+### 3. Implement `LibraryStore` class (FR-1..FR-8, FR-9..FR-15, FR-30..FR-36, FR-42..FR-48)
+
+- Create `src/lib/library-store/store.ts` modelled on `BrainStore`. Singleton via `getLibraryStore()`; `resetLibraryStoreForTests()` export.
+- Manifest at `{dataDir}/manifest.json` with fields: `version: 1`, `libraries[]`, `packs[]`, `skills[]`, `documents[]`, `versions[]`, `packVersions[]`, `packVersionDocuments[]`, `skillVersions[]`, `skillVersionDocuments[]`, `branches[]`, `merges[]`, `conflicts[]`.
+- Methods:
+ - `createLibrary(workspaceId, scope, ownerUserId?)` — idempotent; returns existing workspace+user libraries if already present.
+ - `createPack(libraryId, input)` — unique `(libraryId, packKey)`; also creates an initial base branch.
+ - `forkPack(sourcePackId, targetLibraryId)` — copies pack record, copies skill + document rows, sets `base_pack_id`, `branch.base_version_id = source.current_version_id` per document.
+ - `softDeletePack` / `restorePack` / `renamePack` / `movePack`.
+ - `listPacks(libraryId, { includeDeleted })`.
+ - `searchPacks(libraryId, query)` — FR-5; matches name/description/tags/skill metadata/document content (simple linear scan for MVP).
+ - `createDocument(packId, { role, path, content, createdBy })` — stores object, creates `document`, `document_version` with `parentVersionId=null`.
+ - `saveDocumentVersion(docId, { content, previousVersionId, message, createdBy })` — FR-14 optimistic concurrency: reject if `previousVersionId` does not equal current head.
+ - `renameDocument` / `moveDocument` / `softDeleteDocument` / `restoreDocument`.
+ - `listDocuments(packId, { includeDeleted })`.
+ - `createSkill(packId, { skillKey, name, description, entrypointDocId })`.
+ - `listSkills(packId)` / `softDeleteSkill` / `renameSkill`.
+ - `publishPackVersion(packId, { version, createdBy })` — snapshots every current doc head into `pack_version_documents`, stores normalized `manifest.json` in RustFS-style key (`packs/{packId}/versions/{versionId}/manifest.json`), validates before committing (FR-48).
+ - `publishSkillVersion(skillId, { version, createdBy, linkToLatestPackVersion })` — snapshots entrypoint doc version + closure.
+ - `mergeBaseIntoBranch(packId, branchId, userId)` — runs merge per document (calls `merge.ts`). Clean merges create `document_versions`, update branch heads, write `document_merges` with `merged_cleanly`. Conflicts write `document_merges` `conflict` + `document_conflicts[]`. FR-22..FR-29.
+ - `resolveMergeConflict(mergeId, { resolvedContentByDocId, resolvedBy })` — completes the merge, updates heads.
+ - `resolveLive({ scope, packId, packVersion, skillId })` → `SkillBundle` (FR-54).
+ - `compareDraftToPublished(packId, publishedVersionId)` — diff counts per document (FR-46).
+- Every version write computes SHA-256 content hash and stores it in metadata JSON alongside the object.
+- Reuse `requireWorkspace(token)` from `src/lib/brain/server.ts` to bind libraries to Brain workspace ids.
+
+### 4. Implement three-way Markdown merge (FR-25, FR-26, FR-27, FR-29)
+
+- Create `src/lib/library-store/merge.ts` exporting `threeWayTextMerge(ancestor, theirs, yours)` returning `{ content, conflicts }`. Implement a simple line-based diff3 algorithm (no new dep) — common lines, identical changes are auto-kept; divergent edits produce a conflict block `<<<<<<< yours ... ======= ... >>>>>>> theirs` and a structured conflict entry with `{ path, conflictType: "text_conflict", ancestor, base, branch }`.
+- Unit tests (see step 15) verify clean merge, same-line conflict, identical concurrent edit, deleted-vs-edited, add-vs-add.
+
+### 5. Implement manifest building + validation (FR-36, FR-37, FR-41, Validation Requirements section)
+
+- Create `src/lib/library-store/manifest.ts`. `buildManifest(pack, skills, documents)` returns a `ManifestSchemaV1`-compatible object. Auto-map entrypoints, references from skill records, role-tagged documents → `docs` / `rules` / `assets`.
+- Create `src/lib/library-store/validation.ts`. Exports `validatePack(pack, skills, documents)` returning `ValidationWarning[]`. Cover every bullet in the spec "Validation Requirements" section: missing skill entrypoint, invalid `SKILL.md` frontmatter, missing description, duplicate pack/skill IDs, broken relative links (scan Markdown `[text](./path.md)` and image references), missing referenced documents, manifest path mismatch, deleted docs referenced by active skills, unresolved merge conflicts, missing export metadata, hash mismatch during import/open.
+- Expose frontmatter parser (`parseSkillFrontmatter(content)`) used by `createSkill` and validation.
+
+### 6. Build API routes (FR-1..FR-48, FR-49..FR-54 subset)
+
+- For each route under `src/app/api/library/**/route.ts`, follow the Brain route pattern: read `x-brain-token` via `getBrainTokenFromHeaders()`, call `requireWorkspace()`, validate body against the Zod schema, call the matching `getLibraryStore().<method>()`, return JSON.
+- Cover: session, packs (CRUD + fork + merge-base), documents (CRUD + versions with optimistic concurrency via an `If-Match: <headVersionId>` header **or** JSON body field), skills (CRUD + versions), publish flows, resolve, import, export.
+- For `POST /api/library/import` accept multipart or JSON and dispatch to `src/lib/library-store/import.ts`.
+- For `POST /api/library/export` accept `{ workflowJson }`, call `src/lib/library-store/export.ts`, stream the zip back with `Content-Type: application/zip` and `Content-Disposition: attachment; filename="<workflow-name>.nexus"`.
+
+### 7. Update Skill node data model (FR-31, FR-49, FR-50, FR-51, FR-52)
+
+- In `src/nodes/skill/types.ts`, extend `SkillNodeData` with optional fields:
+ - `libraryRef?: { scope: "workspace" | "user"; packId: string; packVersion: string | "draft"; skillId: string } | null;`
+ - Keep existing `skillName`, `description`, `promptText`, etc. as fallback for inline skills (back-compat for existing workflows).
+- Update `src/nodes/skill/constants.ts` default data to `libraryRef: null`.
+- Update `src/components/workflow/properties/skill-fields.tsx` to render a "Link to library skill" section that opens the new `SkillPickerDialog` (see step 12). When a library ref is set, display pack name, pack version, skill name, deprecation/soft-delete warnings, and allow "Detach" to revert to inline mode.
+- Update `src/nodes/skill/generator.ts` `getSkillFile()` — when `libraryRef` is set and non-null, resolve via `resolveLive()` (live mode) or embed from the export artifact. For MVP the live resolution happens at export time; the live canvas preview still reads `libraryRef` to fetch content from the store (async) and falls back to `promptText` for offline editing.
+- Update `src/nodes/skill/node.tsx` renderer to show a pack badge when `libraryRef` is present.
+- Update `src/types/workflow.ts` to keep the union consistent.
+- Update `src/lib/workflow-generator.ts` → `collectAgentFiles()` to accept resolved skill bundles (pass them through the generator call). For export-target generation of `.opencode|.pi|.claude`, skill content must come from the resolved skill bundle (live or pinned) when a ref is set.
+
+### 8. Build library-docs Zustand store (FR-3, FR-7, FR-8, FR-52, FR-53)
+
+- Create `src/store/library-docs/store.ts` with actions mirroring the API: `bootstrap()`, `listPacks(scope)`, `createPack`, `forkPack`, `loadPackDetail`, `createDocument`, `saveDocument`, `renameDocument`, `softDeleteDocument`, `createSkill`, `publishPack`, `publishSkill`, `mergeBase`, `resolveConflict`, `resolveLiveSkill`.
+- Track `pendingMerges` and `fork behind base` flags per pack.
+- Subscribe to `useKnowledgeStore` session token to authenticate requests.
+
+### 9. Wire per-document Y.js collab binding (FR-10, FR-16..FR-21)
+
+- Create `src/lib/collaboration/lib-doc-collab.ts` exporting `openLibraryDocRoom(workspaceId, scope, packId, docId)` which returns `{ provider, yText, destroy }`. Room name: `lib:{workspaceId}:{scope}:{packId}:{docId}`. Reuse `getCollabServerUrl()` and `HocuspocusProvider`.
+- Hocuspocus already persists arbitrary room state through `src/lib/collaboration/object-store.ts`; no server changes required unless debounce tuning is needed for Markdown editing (keep current 1000 ms default).
+- When the document editor mounts, open the room; bind the MD editor's controlled value to `yText`. On "Save snapshot" (explicit button or 5-second idle), take the current `yText.toString()` and POST to `/api/library/packs/.../documents/.../versions` with `previousVersionId` from the last-known head. On success update the head in the store and show the new version in the version history list.
+- Broadcast editing presence via awareness (reuse `getOrCreateUserName`, color generator). FR-19.
+
+### 10. Build Documents panel UI (FR-9, FR-11, FR-12, FR-15, FR-34, FR-35)
+
+- Create `src/components/workflow/documents-panel/` per the New Files list. Follow the pattern of `library-panel/` (controller hook + view components, shadcn primitives, dark-theme tokens from `src/lib/theme.ts`).
+- Panel layout per spec: `[ Library / Packs ] [ File Tree ] [ Editor / Preview ] [ Skill Details / Validation ]`.
+- Editor reuses `@uiw/react-md-editor` (already in deps) with Y.Text binding.
+- File tree groups by document role (skill entrypoint, references, docs, rules, examples, templates, manifests, assets) — FR-12.
+- Show per-document status: branch name, base version id (short), head version id (short), "clean / behind / conflict" badge — FR-15.
+- Add/Rename/Move/Soft-delete/Restore document actions — FR-11.
+- Markdown preview and resolved skill preview (use `resolver.resolveLive()`).
+
+### 11. Build library home + pack browser (FR-1..FR-7)
+
+- `documents-panel/panel.tsx`: scope tabs (Workspace / User-local), pack grid (reuse `library-panel/cards.tsx` visual patterns). "New pack" button opens a dialog.
+- `pack-browser.tsx`: search input (FR-5), fork button on workspace packs (FR-6), "behind base" badge on forked packs (FR-7), soft-delete + restore affordances.
+- `pack-detail.tsx`: opens `pack-detail` view hosting the four-column layout.
+
+### 12. Build skill picker + workflow Skill node wiring (FR-49, FR-50)
+
+- Create `src/components/workflow/properties/skill-picker-dialog.tsx` — a Radix `Dialog` listing packs (grouped by scope), skills per pack, version dropdown (`draft` + published semver list). Selecting a skill writes `libraryRef` onto the Skill node and closes the dialog.
+- Emit warnings on the node when the referenced pack/skill is soft-deleted/deprecated — FR-52.
+- Update `src/components/workflow/properties/skill-fields.tsx` to add a "Library reference" section above the inline fields.
+
+### 13. Build validation panel (FR-36, FR-52)
+
+- `skill-detail-panel.tsx` shows `ValidationWarning[]` from `validatePack()`. Warnings re-run on every document save.
+
+### 14. Build publish UI (FR-42, FR-43, FR-46, FR-47, FR-48)
+
+- `publish-panel.tsx`:
+ - "Publish pack version" dialog: version string input (enforce semver regex via Zod), diff summary against latest published version (FR-46), validation must be clean before submit (FR-48).
+ - "Publish skill version" dialog: similar.
+ - List of published versions with badges for deprecated/soft-deleted; allow deprecate / undeprecate / soft-delete (FR-47).
+
+### 15. Build branch / fork / merge UI (FR-6, FR-7, FR-8, FR-22..FR-29)
+
+- `branch-status-panel.tsx` surfaces base pack version, branch head, and the "behind / clean / conflict" state computed by `store.getForkState(packId)`.
+- "Merge latest base" button calls `mergeBase`.
+- `conflict-resolve-dialog.tsx` renders each `document_conflicts` row with three side-by-side columns (ancestor, base, branch) and an editable resolution textarea; on submit call `resolveMergeConflict` with `{ resolvedContentByDocId }`.
+
+### 16. Build export pipeline (.nexus archive) (FR-55..FR-67)
+
+- Create `src/lib/library-store/export.ts`. Build a JSZip archive:
+ - `manifest.json` — top-level archive manifest: `{ schemaVersion: 1, workflowName, createdAt, createdBy, packs[], skills[], resolverMode }`.
+ - `workflow.json` — normalized workflow JSON (FR-56).
+ - `libraries/{scope}/packs/{packKey}/manifest.json` — normalized pack manifest (FR-58, FR-59, FR-61).
+ - `libraries/{scope}/packs/{packKey}/skills/{skillKey}/SKILL.md` — entrypoint doc content at export time.
+ - `libraries/{scope}/packs/{packKey}/docs/**` and `rules/**`, `assets/**` — referenced content (FR-57, FR-60).
+ - `runtime/resolver-metadata.json` — map `{ scope, packId, packVersion, skillId } → artifact path`, including content-hash references (FR-61).
+ - `hashes.json` — map path → sha256 (FR-65).
+- Traversal: for each Skill node with `libraryRef`, walk pack manifest to include every referenced document + pack-level docs/rules/assets needed by the skill. Snapshot drafts at current head; snapshot published versions at their version ID (FR-63, FR-64).
+- Integrity validation step before returning the archive (FR-66).
+- Add `buildGeneratedArchiveFilePath(workflowName)` helper to `src/lib/generation-targets.ts`.
+- Hook into the existing export dialog (`generated-export-dialog.tsx`) as a new archive option beside OpenCode / PI / Claude Code.
+
+### 17. Build import pipeline (FR-68..FR-72)
+
+- `src/lib/library-store/import.ts`:
+ - `importNexusArchive(buffer)` — validates `manifest.json` schema, re-hashes every file against `hashes.json` (FR-67), imports packs into the current workspace library, preserving `packKey`/`skillKey`. Collisions prompt for rename or merge (MVP: rename with `-imported-{n}`).
+ - `importAgentSkillsFolder(buffer)` — best-effort: for each `SKILL.md` found, create a skill + document; parse frontmatter; include sibling `references/**` etc. Sets pack provenance flag `external: true` (Security Requirements section).
+- `POST /api/library/import` route accepts multipart file uploads and dispatches.
+- Add `import-dialog.tsx` hooking the UI up.
+
+### 18. Add Brain / Hocuspocus integration to collab-doc.ts (FR-17, FR-18)
+
+- Extend `src/lib/collaboration/collab-doc.ts` OR create a sibling for library docs (prefer sibling to keep single-responsibility). Rooms are per-document (many small rooms) vs. the single workflow room.
+- No server change required — Hocuspocus server is generic (`scripts/collab-server.ts`).
+
+### 19. Deprecate/migrate overlap with Brain "documents" (optional, but clarify)
+
+- Existing Brain documents stay under `/api/brain`. The library is a new, parallel system. The `Prompt` and `Document` nodes retain their `brainDocId` field for existing users.
+- Add a migration helper `src/lib/library-store/brain-migration.ts` that offers a one-click "Import Brain docs into user library" button in the Documents panel (optional MVP polish; leave a TODO note if time-boxed out).
+
+### 20. Add workspace env + startup wiring
+
+- Update `.env.example` with `NEXUS_LIBRARY_DATA_DIR` (defaults to `./.nexus-library` when unset).
+- Update `scripts/start.sh` if it predefines Brain/collab dirs — mirror for library dir. Otherwise rely on defaults.
+- Update `.gitignore` to add `.nexus-library/`.
+- Update `Dockerfile` / `docker-compose.yml` if they mount Brain/collab dirs — add `.nexus-library` volume.
+
+### 21. Write storage tests (Phase 1 deliverable; repeat extending through Phase 4)
+
+- `src/lib/__tests__/library-store.test.ts`:
+ - Create workspace + user libraries (FR-1, FR-2).
+ - Create/list/soft-delete/restore pack (FR-3, FR-4).
+ - Create two skills + shared doc (AC-1).
+ - Save doc version rejects on stale `previousVersionId` (FR-14).
+ - Fork pack copies rows and sets `base_version_id` (FR-6, AC-2).
+ - Merge base into fork with no conflicts (AC-5).
+ - Merge base with same-line conflict produces `document_merges` + `document_conflicts` (AC-6).
+ - Resolve conflict updates branch head (FR-27).
+ - Publish pack version snapshots current doc heads (FR-42, AC-7).
+ - Publish skill version snapshots entrypoint closure (FR-43, AC-8).
+ - Soft-delete published version remains resolvable (FR-47, AC-12 precondition).
+- `src/lib/__tests__/library-merge.test.ts` covers diff3 edge cases.
+- `src/lib/__tests__/library-validation.test.ts` covers every rule in the Validation Requirements list (FR-36).
+- `src/lib/__tests__/library-export.test.ts`:
+ - Builds a `.nexus` archive containing workflow + packs + hashes.
+ - Hash validation round-trip (FR-65, FR-66, AC-10).
+ - Resolver works against artifact without live library (AC-11, FR-62).
+- `src/lib/__tests__/library-import.test.ts`:
+ - Round-trip Nexus-native export + import (FR-68, FR-70).
+ - Best-effort Agent Skills zip with single `SKILL.md` (FR-71, FR-72).
+- `src/lib/__tests__/library-resolver.test.ts`:
+ - Live resolution of a draft pack returns current head content.
+ - Live resolution of a pinned published version ignores subsequent draft edits.
+
+### 22. Write store tests
+
+- `src/store/__tests__/library-docs.test.ts`:
+ - `listPacks` populates state.
+ - `saveDocument` marks version history.
+ - `mergeBase` updates pending conflicts.
+ - Skill picker selection updates workflow node data.
+
+### 23. Write node generator test updates
+
+- Update `src/nodes/skill/__tests__/generator.test.ts` (create if missing under `src/nodes/skill/`):
+ - With no `libraryRef`, output matches existing inline behavior.
+ - With `libraryRef` and a resolved bundle, output uses pack-content Markdown and frontmatter.
+ - Deprecated/soft-deleted ref emits a warning path in generation log (non-fatal).
+
+### 24. Describe E2E test file (do NOT create)
+
+Describe `docs/tasks/feature-documents-skill-library-60d267bf/e2e-feature-documents-skill-library-60d267bf.md` with:
+
+- **User Story** — "As a workspace user, I can create a pack with two skills, fork it into my user-local library, edit a skill in real-time with a collaborator, publish a pack version, reference that skill in a workflow, and export a self-contained `.nexus` archive that resolves skill content offline."
+- **Test Steps** (playwright-cli):
+ 1. Open app at `http://localhost:3000` (screenshot).
+ 2. Open Documents panel from header toolbar (screenshot: empty workspace library).
+ 3. Click "New pack", enter `customer-support`, create (screenshot: pack detail view).
+ 4. Create skill `support-triage` with description "Classifies support requests." (screenshot: SKILL.md editor).
+ 5. Edit `SKILL.md` to contain `# Support Triage\nInitial instructions.`, save (screenshot: version history row appears).
+ 6. Add supporting document `references/escalation-policy.md` (screenshot: file tree shows two docs).
+ 7. Publish pack version `1.0.0` (screenshot: publish success toast; published list entry).
+ 8. Fork pack to user-local library (screenshot: user-local tab shows forked pack with "cleanly derived from workspace 1.0.0" badge).
+ 9. Back in workspace pack: edit `SKILL.md` to append `\nAdded v1.1 guidance.`, save, publish `1.1.0`.
+ 10. Switch to user-local fork, click "Merge latest base", expect clean merge (screenshot: merged doc contains appended text).
+ 11. Open a workflow, place a Skill node, open skill picker, select `workspace / customer-support / support-triage @ 1.1.0` (screenshot: Skill node shows pack badge).
+ 12. Open "Generate / Export" dialog, choose "Nexus archive", click export, capture download (screenshot: dialog showing archive summary).
+ 13. Open the resulting `.nexus` via the import dialog in a fresh workspace (or a simulated one), confirm the skill resolves with the saved content (screenshot: resolved skill preview).
+- **Success Criteria**:
+ - Pack and skill appear with the exact names above.
+ - Published versions list contains `1.0.0` and `1.1.0`.
+ - Forked pack shows "behind base" before merge and "in sync" after.
+ - Workflow Skill node displays pack `customer-support @ 1.1.0`.
+ - Exported archive contains `workflow.json`, `libraries/workspace/packs/customer-support/skills/support-triage/SKILL.md`, `hashes.json`, `runtime/resolver-metadata.json`.
+ - Re-importing the archive reproduces the skill content byte-for-byte (hash match).
+- **Screenshot capture points** — as listed at every numbered step.
+
+### 25. Update README and CLAUDE-style quickstart
+
+- Extend `README.md` with a new "Documents Skill Library" section summarizing workspace + user-local packs, publish, fork/merge, and `.nexus` export.
+- Update `CLAUDE.md` if the node count changes (it currently says "more than 11 nodes"; new node types are not added, just new node data fields — double-check wording).
+- Update `docs/tasks/conditional_docs.md` to add a condition pointing to a new doc `docs/tasks/documents-skill-library/doc-documents-skill-library.md` (write the doc as a short "what was built" summary mirroring the Brain doc).
+
+### 26. Validation pass
+
+- Run the `Validation Commands` below; fix any failure before closing.
+
+## Testing Strategy
+
+### Unit Tests
+
+- **Storage**: `library-store.test.ts` — all pack / skill / doc / version CRUD, soft-delete, optimistic concurrency rejection, publish semantics, branch/merge, conflict resolution.
+- **Merge**: `library-merge.test.ts` — diff3 clean/same-line conflict/add-add/delete-edit/trailing-newline cases.
+- **Validation**: `library-validation.test.ts` — all Validation Requirements entries.
+- **Export/Import**: `library-export.test.ts`, `library-import.test.ts` — round-trip Nexus-native, hash mismatches rejected, Agent Skills best-effort import.
+- **Resolver**: `library-resolver.test.ts` — live (draft head) vs. pinned published behavior; artifact-mode resolution.
+- **Schemas**: manifest + API payload Zod schemas round-trip.
+- **Skill node generator**: library-ref path writes SKILL.md from resolved bundle; inline path unchanged.
+
+### Edge Cases
+
+- Optimistic concurrency: two clients saving to the same document — second must receive a stale-head rejection.
+- Pack-level merge where half the documents merge cleanly and half conflict — merge record must aggregate (FR-28).
+- Soft-deleted published version: already-exported artifact still resolves (AC-12).
+- Draft pack reference in workflow export: content snapshots at export time (FR-63), live edits after export do not affect the artifact.
+- Forking a pack whose base is also forked (two-step derivation).
+- Importing a pack with duplicate `packKey` — rename with suffix.
+- Agent Skills folder with missing frontmatter — creates skill with placeholder description and emits a validation warning.
+- Large document (> 1 MB) — streaming save (no explicit size cap per PD-8).
+- Content hash mismatch during import — reject with clear error (FR-67).
+- Skill node references a pack version that no longer exists — generator emits inline placeholder + warning, not a crash.
+- Y.js save when Hocuspocus is offline: queue locally via `localStorage`, flush on reconnect.
+
+## Acceptance Criteria
+
+Every spec AC must be covered. Each bullet below is a pass/fail criterion.
+
+- **AC-1**: A workspace user can create a pack with two skills and shared docs. Verified by: Documents panel manual flow + `library-store.test.ts::createPackWithTwoSkills`.
+- **AC-2**: A user can fork that pack into their user-local library. Verified by: fork button + `library-store.test.ts::forkPack`.
+- **AC-3**: Two users can edit the same Markdown document in real time. Verified by: E2E + manual browser test with two tabs using Hocuspocus-backed `Y.Text`.
+- **AC-4**: Saving creates immutable document versions backed by the filesystem object store (RustFS-compatible key layout). Verified by: `library-store.test.ts::versionSnapshot` asserts file presence under `documents/{id}/versions/{v}/content.md`.
+- **AC-5**: A workspace pack update can be merged into a user-local fork. Verified by: `library-store.test.ts::mergeBaseClean`.
+- **AC-6**: A conflicting Markdown edit creates a conflict record instead of overwriting. Verified by: `library-merge.test.ts::sameLineConflict` + `library-store.test.ts::mergeBaseConflict`.
+- **AC-7**: A pack version can be published and later resolved by workflow nodes. Verified by: `library-store.test.ts::publishPackThenResolve`.
+- **AC-8**: An individual skill version can be published and resolved. Verified by: `library-store.test.ts::publishSkillThenResolve`.
+- **AC-9**: A workflow node can reference `scope + packId + packVersion + skillId`. Verified by: Skill node type + generator tests + skill picker UI.
+- **AC-10**: A workflow export includes all required documents, skills, packs, metadata, assets, and hashes. Verified by: `library-export.test.ts::fullArchiveContents`.
+- **AC-11**: The exported artifact can resolve skill references without access to the live library. Verified by: `library-export.test.ts::resolveFromArtifactWithoutStore` (no network / store calls).
+- **AC-12**: Soft-deleting a live document or version does not break an already-exported workflow artifact. Verified by: `library-export.test.ts::softDeleteAfterExport`.
+
+Additional acceptance:
+
+- `bun run typecheck`, `bun run lint`, `bun run test`, and `bun run build` all pass.
+- No new `any` usage outside of documented cast patterns.
+- All Zod imports use `"zod/v4"`.
+- Dark-theme shadcn primitives reused; no hand-edits to `src/components/ui/`.
+- `.nexus-library/` added to `.gitignore`.
+- Documents panel opens without an active OpenCode connection (FR editor must work offline, per CLAUDE.md "offline/editor-only flows").
+
+## Validation Commands
+
+Execute every command to validate the work is complete with zero regressions.
+
+- `bun run typecheck` — TypeScript strict check.
+- `bun run lint` — ESLint (`--max-warnings=0`).
+- `bun run test` — full Bun test suite.
+- `bun run test:lib` — focused lib tests (fast signal while iterating).
+- `bun run test:store` — focused store tests.
+- `bun run test:nodes` — node generator + utility tests.
+- `bun run build` — Next.js production build (wiring / route / export regressions surface here).
+- Manual smoke (in browser at `http://localhost:3000`):
+ - Start both servers: `bun run collab:server` and `bun run dev`.
+ - Open Documents panel → create workspace pack → create skill → edit `SKILL.md` → save → see new version row.
+ - Fork pack → edit base → merge clean into fork.
+ - Publish pack version `1.0.0` → reference in Skill node → export `.nexus` archive → re-import → confirm resolved skill matches.
+
+## Notes
+
+- **No database in this repo.** The spec suggests Postgres tables; we persist the same semantic schema in a single JSON manifest plus per-record files under `.nexus-library/`. Downstream swap to Postgres is a storage-driver change.
+- **RustFS substitution.** The repo name suggests a future swap from filesystem to RustFS/S3. Keep `object-store.ts` behind an `ObjectStorage` interface so a replacement driver is a one-file change.
+- **Auth parity with Brain.** Library sessions reuse the Brain workspace id + HMAC token so share links and presence work with existing token plumbing.
+- **`.nexus` extension is provisional** — spec open question 7. Expose a helper so the extension can be changed in one place.
+- **Agent Skills style.** `SKILL.md` frontmatter remains compatible with existing generated output (`name`, `description`, `compatibility`, `metadata`) — keep the same frontmatter keys the current skill generator produces.
+- **Draft reference in live workflow** (FR-51, PD-6) requires the resolver to read current heads; leave a comment in the resolver noting that exports always snapshot (FR-63) so production runs are deterministic.
+- **No executable scripts at runtime** (spec Security Requirements; Non-Goals). Scripts are stored as documents with role `asset` or `script` but are not executed by this feature.
+- **Complexity classification: complex.** All phased sections above are included. If scope pressure arises, the hard-to-defer set is: data model, minimal UI, publish, and export — these are the AC spine (AC-1, 4, 7, 9, 10, 11). Branch/merge (AC-2, 5, 6) and user-local library are required by the MVP scope section and should not be deferred.
diff --git a/scripts/start.sh b/scripts/start.sh
index f190979..d926e61 100755
--- a/scripts/start.sh
+++ b/scripts/start.sh
@@ -5,6 +5,7 @@ ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
ENV_FILE="$ROOT_DIR/.env.local"
DEFAULT_BRAIN_DIR="$ROOT_DIR/.nexus-brain"
DEFAULT_COLLAB_DIR="$ROOT_DIR/.nexus-collab"
+DEFAULT_LIBRARY_DIR="$ROOT_DIR/.nexus-library"
log() {
printf '[start] %s\n' "$*"
@@ -56,11 +57,14 @@ start_local() {
local collab_dir="${NEXUS_COLLAB_DATA_DIR:-$DEFAULT_COLLAB_DIR}"
local collab_port="${NEXUS_COLLAB_SERVER_PORT:-1234}"
local collab_url="${NEXT_PUBLIC_COLLAB_SERVER_URL:-ws://localhost:${collab_port}}"
+ local library_dir="${NEXUS_LIBRARY_DATA_DIR:-$DEFAULT_LIBRARY_DIR}"
mkdir -p "$brain_dir"
mkdir -p "$collab_dir"
+ mkdir -p "$library_dir"
log "Using Brain data directory: $brain_dir"
log "Using collaboration data directory: $collab_dir"
+ log "Using Library data directory: $library_dir"
if [[ -z "$brain_secret" ]]; then
brain_secret="$(random_hex)"
@@ -71,12 +75,14 @@ start_local() {
ensure_env_value "NEXUS_COLLAB_DATA_DIR" "$collab_dir"
ensure_env_value "NEXUS_COLLAB_SERVER_PORT" "$collab_port"
ensure_env_value "NEXT_PUBLIC_COLLAB_SERVER_URL" "$collab_url"
+ ensure_env_value "NEXUS_LIBRARY_DATA_DIR" "$library_dir"
export NEXUS_BRAIN_DATA_DIR="$brain_dir"
export NEXUS_BRAIN_TOKEN_SECRET="$brain_secret"
export NEXUS_COLLAB_DATA_DIR="$collab_dir"
export NEXUS_COLLAB_SERVER_PORT="$collab_port"
export NEXT_PUBLIC_COLLAB_SERVER_URL="$collab_url"
+ export NEXUS_LIBRARY_DATA_DIR="$library_dir"
cd "$ROOT_DIR"
diff --git a/src/app/api/library/export/route.ts b/src/app/api/library/export/route.ts
new file mode 100644
index 0000000..557272f
--- /dev/null
+++ b/src/app/api/library/export/route.ts
@@ -0,0 +1,35 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { buildNexusArchive } from "@/lib/library-store/export";
+import { exportRequestSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+// POST /api/library/export — builds a `.nexus` archive for the supplied workflow.
+// Success responds with raw zip bytes; all errors respond as JSON (like sibling routes).
+export async function POST(request: Request) {
+  try {
+    await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const body = await request.json();
+    const parsed = exportRequestSchema.safeParse(body);
+    if (!parsed.success) {
+      return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+    }
+    const { buffer, archiveName } = await buildNexusArchive({
+      workflowJson: parsed.data.workflowJson,
+      workflowName: parsed.data.workflowName,
+      createdBy: parsed.data.createdBy,
+    });
+    // Strip quotes/CRLF so the name cannot break or smuggle data into the header value.
+    const safeName = archiveName.replace(/[\r\n"]/g, "");
+    return new Response(new Uint8Array(buffer), {
+      status: 200,
+      headers: {
+        "content-type": "application/zip",
+        "content-disposition": `attachment; filename="${safeName}"`,
+      },
+    });
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+  }
+}
diff --git a/src/app/api/library/import/route.ts b/src/app/api/library/import/route.ts
new file mode 100644
index 0000000..e0d1052
--- /dev/null
+++ b/src/app/api/library/import/route.ts
@@ -0,0 +1,37 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { importNexusArchive, importAgentSkillsFolder } from "@/lib/library-store/import";
+
+export const dynamic = "force-dynamic";
+
+// POST /api/library/import — accepts a multipart upload and dispatches to the
+// Nexus-native or Agent-Skills importer based on the `format` form field.
+export async function POST(request: Request) {
+  try {
+    const workspaceId = await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const formData = await request.formData();
+    const file = formData.get("file");
+    if (!file || !(file instanceof File)) {
+      return NextResponse.json({ error: "Missing file upload" }, { status: 400 });
+    }
+    // Narrow form fields at runtime instead of casting: FormDataEntryValue may be a File.
+    const formatField = formData.get("format");
+    const format = formatField === "agent-skills" ? "agent-skills" : "nexus";
+    const scopeField = formData.get("scope");
+    const scope = scopeField === "user" ? "user" : "workspace";
+    const ownerUserId = scope === "user" ? "default-user" : null;
+    const buffer = Buffer.from(await file.arrayBuffer());
+
+    if (format === "agent-skills") {
+      const packKeyField = formData.get("packKey");
+      const packKey = typeof packKeyField === "string" && packKeyField ? packKeyField : "imported-skills";
+      const result = await importAgentSkillsFolder({ buffer, workspaceId, ownerUserId, scope, packKey });
+      return NextResponse.json({ packs: result.packs });
+    }
+
+    const result = await importNexusArchive({ buffer, workspaceId, ownerUserId, scope });
+    return NextResponse.json({ packs: result.packs });
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+  }
+}
diff --git a/src/app/api/library/packs/[packId]/documents/[docId]/route.ts b/src/app/api/library/packs/[packId]/documents/[docId]/route.ts
new file mode 100644
index 0000000..c7aa3be
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/documents/[docId]/route.ts
@@ -0,0 +1,60 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { updateDocumentSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+  params: Promise<{ packId: string; docId: string }>;
+}
+
+// GET — returns the document row plus its current head content.
+export async function GET(request: Request, context: RouteContext) {
+  try {
+    await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const { packId, docId } = await context.params;
+    const documents = await getLibraryStore().listDocuments(packId, { includeDeleted: true });
+    const document = documents.find((d) => d.id === docId);
+    if (!document) return NextResponse.json({ error: "Document not found" }, { status: 404 });
+    const content = await getLibraryStore().readDocumentContent(docId, document.currentVersionId);
+    return NextResponse.json({ document, content });
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+  }
+}
+
+// PATCH — updates document metadata; currently supports rename/move via `path`.
+export async function PATCH(request: Request, context: RouteContext) {
+  try {
+    await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const { packId, docId } = await context.params;
+    const body = await request.json();
+    const parsed = updateDocumentSchema.safeParse(body);
+    if (!parsed.success) {
+      return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+    }
+    const store = getLibraryStore();
+    // The route already carries packId — no need to re-derive it from the manifest.
+    const documents = await store.listDocuments(packId);
+    let updated = documents.find((d) => d.id === docId);
+    if (!updated) return NextResponse.json({ error: "Document not found" }, { status: 404 });
+    if (parsed.data.path) {
+      updated = await store.renameDocument(docId, parsed.data.path);
+    }
+    return NextResponse.json({ document: updated });
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+  }
+}
+
+// DELETE — soft-deletes the document (restorable; versions are retained).
+export async function DELETE(request: Request, context: RouteContext) {
+  try {
+    await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const { docId } = await context.params;
+    await getLibraryStore().softDeleteDocument(docId);
+    return NextResponse.json({ deleted: true });
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+  }
+}
diff --git a/src/app/api/library/packs/[packId]/documents/[docId]/versions/[versionId]/content/route.ts b/src/app/api/library/packs/[packId]/documents/[docId]/versions/[versionId]/content/route.ts
new file mode 100644
index 0000000..13982b5
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/documents/[docId]/versions/[versionId]/content/route.ts
@@ -0,0 +1,24 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+  params: Promise<{ packId: string; docId: string; versionId: string }>;
+}
+
+// GET — returns the immutable content snapshot for one document version.
+export async function GET(request: Request, context: RouteContext) {
+  try {
+    await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const { docId, versionId } = await context.params;
+    const content = await getLibraryStore().readDocumentContent(docId, versionId);
+    if (content === null) {
+      return NextResponse.json({ error: "Version not found" }, { status: 404 });
+    }
+    return NextResponse.json({ content });
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+  }
+}
diff --git a/src/app/api/library/packs/[packId]/documents/[docId]/versions/route.ts b/src/app/api/library/packs/[packId]/documents/[docId]/versions/route.ts
new file mode 100644
index 0000000..4935869
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/documents/[docId]/versions/route.ts
@@ -0,0 +1,51 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore, StaleVersionError } from "@/lib/library-store/store";
+import { saveDocumentVersionSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+  params: Promise<{ packId: string; docId: string }>;
+}
+
+// GET — lists the immutable version history for a document.
+export async function GET(request: Request, context: RouteContext) {
+  try {
+    await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const { docId } = await context.params;
+    const versions = await getLibraryStore().listVersions(docId);
+    return NextResponse.json({ versions });
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+  }
+}
+
+// POST — saves a new head version. Optimistic concurrency: the expected head
+// comes from the If-Match header when present, otherwise the JSON payload;
+// a stale expected head is rejected with 409.
+export async function POST(request: Request, context: RouteContext) {
+  try {
+    await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const { docId } = await context.params;
+    const parsed = saveDocumentVersionSchema.safeParse(await request.json());
+    if (!parsed.success) {
+      return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+    }
+    const { content, message, createdBy, metadata } = parsed.data;
+    const previousVersionId = request.headers.get("If-Match") ?? parsed.data.previousVersionId;
+    const version = await getLibraryStore().saveDocumentVersion(docId, {
+      content,
+      previousVersionId,
+      message,
+      createdBy,
+      metadata,
+    });
+    return NextResponse.json({ version });
+  } catch (error) {
+    if (error instanceof StaleVersionError) {
+      return NextResponse.json({ error: error.message }, { status: 409 });
+    }
+    return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+  }
+}
diff --git a/src/app/api/library/packs/[packId]/documents/route.ts b/src/app/api/library/packs/[packId]/documents/route.ts
new file mode 100644
index 0000000..327cf58
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/documents/route.ts
@@ -0,0 +1,38 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { createDocumentSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+  params: Promise<{ packId: string }>;
+}
+
+// GET — lists the documents belonging to a pack (soft-deleted docs excluded).
+export async function GET(request: Request, context: RouteContext) {
+  try {
+    await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const { packId } = await context.params;
+    const documents = await getLibraryStore().listDocuments(packId);
+    return NextResponse.json({ documents });
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+  }
+}
+
+// POST — creates a new document in the pack from a schema-validated payload.
+export async function POST(request: Request, context: RouteContext) {
+  try {
+    await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const { packId } = await context.params;
+    const parsed = createDocumentSchema.safeParse(await request.json());
+    if (!parsed.success) {
+      return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+    }
+    const created = await getLibraryStore().createDocument(packId, parsed.data);
+    return NextResponse.json(created);
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+  }
+}
diff --git a/src/app/api/library/packs/[packId]/fork/route.ts b/src/app/api/library/packs/[packId]/fork/route.ts
new file mode 100644
index 0000000..64da9e5
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/fork/route.ts
@@ -0,0 +1,34 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { forkPackSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+  params: Promise<{ packId: string }>;
+}
+
+// POST — forks a pack into the workspace or user-local library.
+export async function POST(request: Request, context: RouteContext) {
+  try {
+    const workspaceId = await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const { packId } = await context.params;
+    // Body is optional; an absent or malformed JSON body falls back to {}.
+    const parsed = forkPackSchema.safeParse(await request.json().catch(() => ({})));
+    if (!parsed.success) {
+      return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+    }
+    const store = getLibraryStore();
+    const forkToUser = parsed.data.targetScope === "user";
+    const libraries = await store.ensureLibraries(workspaceId, forkToUser ? "default-user" : null);
+    const target = forkToUser ? libraries.user : libraries.workspace;
+    if (!target) {
+      return NextResponse.json({ error: "Target library unavailable" }, { status: 400 });
+    }
+    const pack = await store.forkPack(packId, target.id, { packKey: parsed.data.packKey });
+    return NextResponse.json({ pack });
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+  }
+}
diff --git a/src/app/api/library/packs/[packId]/merge-base/route.ts b/src/app/api/library/packs/[packId]/merge-base/route.ts
new file mode 100644
index 0000000..730bc7b
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/merge-base/route.ts
@@ -0,0 +1,29 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { mergeBaseSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+  params: Promise<{ packId: string }>;
+}
+
+// POST — merges the latest base pack version into this forked pack's branch.
+export async function POST(request: Request, context: RouteContext) {
+  try {
+    await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+    const { packId } = await context.params;
+    // Body is optional; an absent or malformed JSON body falls back to {}.
+    const parsed = mergeBaseSchema.safeParse(await request.json().catch(() => ({})));
+    if (!parsed.success) {
+      return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+    }
+    const merge = await getLibraryStore().mergeBaseIntoBranch(packId, {
+      initiatedBy: parsed.data.initiatedBy,
+    });
+    return NextResponse.json({ merge });
+  } catch (error) {
+    return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+  }
+}
diff --git a/src/app/api/library/packs/[packId]/merges/[mergeId]/resolve/route.ts b/src/app/api/library/packs/[packId]/merges/[mergeId]/resolve/route.ts
new file mode 100644
index 0000000..ad3fca5
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/merges/[mergeId]/resolve/route.ts
@@ -0,0 +1,37 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { resolveConflictSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+ params: Promise<{ packId: string; mergeId: string }>;
+}
+
+export async function GET(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { mergeId } = await context.params;
+ const conflicts = await getLibraryStore().listConflicts(mergeId);
+ return NextResponse.json({ conflicts });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+ }
+}
+
+export async function POST(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { mergeId } = await context.params;
+ const body = await request.json();
+ const parsed = resolveConflictSchema.safeParse(body);
+ if (!parsed.success) {
+ return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+ }
+ const merge = await getLibraryStore().resolveMergeConflict(mergeId, parsed.data);
+ return NextResponse.json({ merge });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+ }
+}
diff --git a/src/app/api/library/packs/[packId]/route.ts b/src/app/api/library/packs/[packId]/route.ts
new file mode 100644
index 0000000..87aa804
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/route.ts
@@ -0,0 +1,56 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { updatePackSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+ params: Promise<{ packId: string }>;
+}
+
+export async function GET(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { packId } = await context.params;
+ const url = new URL(request.url);
+ const validate = url.searchParams.get("validate");
+ const store = getLibraryStore();
+ const pack = await store.getPack(packId);
+ if (validate === "1" && pack) {
+ const warnings = await store.validatePackById(packId);
+ return NextResponse.json({ pack, warnings });
+ }
+ return NextResponse.json({ pack });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+ }
+}
+
+export async function PATCH(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { packId } = await context.params;
+ const body = await request.json();
+ const parsed = updatePackSchema.safeParse(body);
+ if (!parsed.success) {
+ return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+ }
+ const store = getLibraryStore();
+ const pack = await store.renamePack(packId, parsed.data);
+ return NextResponse.json({ pack });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+ }
+}
+
+export async function DELETE(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { packId } = await context.params;
+ await getLibraryStore().softDeletePack(packId);
+ return NextResponse.json({ deleted: true });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+ }
+}
diff --git a/src/app/api/library/packs/[packId]/skills/[skillId]/route.ts b/src/app/api/library/packs/[packId]/skills/[skillId]/route.ts
new file mode 100644
index 0000000..b90a2ad
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/skills/[skillId]/route.ts
@@ -0,0 +1,50 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { updateSkillSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+ params: Promise<{ packId: string; skillId: string }>;
+}
+
+export async function GET(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { packId, skillId } = await context.params;
+ const skills = await getLibraryStore().listSkills(packId);
+ const skill = skills.find((s) => s.id === skillId);
+ if (!skill) return NextResponse.json({ error: "Skill not found" }, { status: 404 });
+ return NextResponse.json({ skill });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+ }
+}
+
+export async function PATCH(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { skillId } = await context.params;
+ const body = await request.json();
+ const parsed = updateSkillSchema.safeParse(body);
+ if (!parsed.success) {
+ return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+ }
+ const skill = await getLibraryStore().updateSkill(skillId, parsed.data);
+ return NextResponse.json({ skill });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+ }
+}
+
+export async function DELETE(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { skillId } = await context.params;
+ await getLibraryStore().softDeleteSkill(skillId);
+ return NextResponse.json({ deleted: true });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+ }
+}
diff --git a/src/app/api/library/packs/[packId]/skills/[skillId]/versions/route.ts b/src/app/api/library/packs/[packId]/skills/[skillId]/versions/route.ts
new file mode 100644
index 0000000..3e8310f
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/skills/[skillId]/versions/route.ts
@@ -0,0 +1,37 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { publishSkillSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+ params: Promise<{ packId: string; skillId: string }>;
+}
+
+export async function GET(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { skillId } = await context.params;
+ const versions = await getLibraryStore().listSkillVersions(skillId);
+ return NextResponse.json({ versions });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+ }
+}
+
+export async function POST(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { skillId } = await context.params;
+ const body = await request.json();
+ const parsed = publishSkillSchema.safeParse(body);
+ if (!parsed.success) {
+ return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+ }
+ const skillVersion = await getLibraryStore().publishSkillVersion(skillId, parsed.data);
+ return NextResponse.json({ skillVersion });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+ }
+}
diff --git a/src/app/api/library/packs/[packId]/skills/route.ts b/src/app/api/library/packs/[packId]/skills/route.ts
new file mode 100644
index 0000000..06c73d2
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/skills/route.ts
@@ -0,0 +1,37 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { createSkillSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+ params: Promise<{ packId: string }>;
+}
+
+export async function GET(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { packId } = await context.params;
+ const skills = await getLibraryStore().listSkills(packId);
+ return NextResponse.json({ skills });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+ }
+}
+
+export async function POST(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { packId } = await context.params;
+ const body = await request.json();
+ const parsed = createSkillSchema.safeParse(body);
+ if (!parsed.success) {
+ return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+ }
+ const skill = await getLibraryStore().createSkill(packId, parsed.data);
+ return NextResponse.json({ skill });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+ }
+}
diff --git a/src/app/api/library/packs/[packId]/versions/[versionId]/route.ts b/src/app/api/library/packs/[packId]/versions/[versionId]/route.ts
new file mode 100644
index 0000000..765c511
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/versions/[versionId]/route.ts
@@ -0,0 +1,23 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+ params: Promise<{ packId: string; versionId: string }>;
+}
+
+export async function GET(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { packId, versionId } = await context.params;
+ const versions = await getLibraryStore().listPackVersions(packId);
+ const packVersion = versions.find((v) => v.id === versionId);
+ if (!packVersion) return NextResponse.json({ error: "Pack version not found" }, { status: 404 });
+ const manifest = await getLibraryStore().getObjectStorage().getObjectAsString(packVersion.manifestKey);
+ return NextResponse.json({ packVersion, manifest: manifest ? JSON.parse(manifest) : null });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+ }
+}
diff --git a/src/app/api/library/packs/[packId]/versions/route.ts b/src/app/api/library/packs/[packId]/versions/route.ts
new file mode 100644
index 0000000..9b94080
--- /dev/null
+++ b/src/app/api/library/packs/[packId]/versions/route.ts
@@ -0,0 +1,40 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore, ValidationError } from "@/lib/library-store/store";
+import { publishPackSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+interface RouteContext {
+ params: Promise<{ packId: string }>;
+}
+
+export async function GET(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { packId } = await context.params;
+ const versions = await getLibraryStore().listPackVersions(packId);
+ return NextResponse.json({ versions });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+ }
+}
+
+export async function POST(request: Request, context: RouteContext) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const { packId } = await context.params;
+ const body = await request.json();
+ const parsed = publishPackSchema.safeParse(body);
+ if (!parsed.success) {
+ return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+ }
+ const packVersion = await getLibraryStore().publishPackVersion(packId, parsed.data);
+ return NextResponse.json({ packVersion });
+ } catch (error) {
+ if (error instanceof ValidationError) {
+ return NextResponse.json({ error: error.message, warnings: error.warnings }, { status: 400 });
+ }
+ return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+ }
+}
diff --git a/src/app/api/library/packs/route.ts b/src/app/api/library/packs/route.ts
new file mode 100644
index 0000000..515bb65
--- /dev/null
+++ b/src/app/api/library/packs/route.ts
@@ -0,0 +1,49 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { createPackSchema, libraryScopeSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+export async function GET(request: Request) {
+ try {
+ const workspaceId = await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const url = new URL(request.url);
+ const scopeParam = url.searchParams.get("scope");
+ const scope = scopeParam ? libraryScopeSchema.parse(scopeParam) : "workspace";
+ const store = getLibraryStore();
+ const { workspace, user } = await store.ensureLibraries(workspaceId, scope === "user" ? "default-user" : null);
+ const lib = scope === "user" ? user : workspace;
+ if (!lib) return NextResponse.json({ packs: [] });
+ const packs = await store.listPacks(lib.id);
+ return NextResponse.json({ packs });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 401 });
+ }
+}
+
+export async function POST(request: Request) {
+ try {
+ const workspaceId = await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const body = await request.json();
+ const parsed = createPackSchema.safeParse(body);
+ if (!parsed.success) {
+ return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+ }
+ const store = getLibraryStore();
+ const { workspace, user } = await store.ensureLibraries(workspaceId, parsed.data.scope === "user" ? "default-user" : null);
+ const lib = parsed.data.scope === "user" ? user : workspace;
+ if (!lib) return NextResponse.json({ error: "Library not available" }, { status: 400 });
+ const pack = await store.createPack(lib.id, {
+ packKey: parsed.data.packKey,
+ name: parsed.data.name,
+ description: parsed.data.description,
+ tags: parsed.data.tags,
+ createdBy: parsed.data.createdBy,
+ metadata: parsed.data.metadata,
+ });
+ return NextResponse.json({ pack });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+ }
+}
diff --git a/src/app/api/library/resolve/route.ts b/src/app/api/library/resolve/route.ts
new file mode 100644
index 0000000..4215e72
--- /dev/null
+++ b/src/app/api/library/resolve/route.ts
@@ -0,0 +1,21 @@
+import { NextResponse } from "next/server";
+import { getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+import { resolveLiveSchema } from "@/lib/library-store/schemas";
+
+export const dynamic = "force-dynamic";
+
+export async function POST(request: Request) {
+ try {
+ await requireWorkspace(getBrainTokenFromHeaders(request.headers));
+ const body = await request.json();
+ const parsed = resolveLiveSchema.safeParse(body);
+ if (!parsed.success) {
+ return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+ }
+ const bundle = await getLibraryStore().resolveLive(parsed.data);
+ return NextResponse.json({ bundle });
+ } catch (error) {
+ return NextResponse.json({ error: (error as Error).message }, { status: 400 });
+ }
+}
diff --git a/src/app/api/library/session/route.ts b/src/app/api/library/session/route.ts
new file mode 100644
index 0000000..00986a2
--- /dev/null
+++ b/src/app/api/library/session/route.ts
@@ -0,0 +1,33 @@
+import { NextResponse } from "next/server";
+import { sessionRequestSchema } from "@/lib/library-store/schemas";
+import { getBrainStore, getBrainTokenFromHeaders, requireWorkspace } from "@/lib/brain/server";
+import { getLibraryStore } from "@/lib/library-store/store";
+
+export const dynamic = "force-dynamic";
+
+export async function POST(request: Request) {
+ const body = await request.json().catch(() => ({}));
+ const parsed = sessionRequestSchema.safeParse(body);
+ if (!parsed.success) {
+ return NextResponse.json({ error: parsed.error.issues[0]?.message ?? "Invalid payload" }, { status: 400 });
+ }
+
+ const headerToken = getBrainTokenFromHeaders(request.headers);
+ const token = parsed.data.token ?? headerToken ?? null;
+ let workspaceId: string;
+ try {
+ workspaceId = await requireWorkspace(token);
+ } catch {
+ const session = await getBrainStore().createOrResumeSession(null, null);
+ workspaceId = session.workspaceId;
+ }
+
+ const store = getLibraryStore();
+ await store.ensureLibraries(workspaceId, parsed.data.ownerUserId ?? null);
+ const libraries = await store.listLibraries(workspaceId);
+ return NextResponse.json({
+ workspaceId,
+ ownerUserId: parsed.data.ownerUserId ?? null,
+ libraries,
+ });
+}
diff --git a/src/components/workflow/documents-panel/branch-status-panel.tsx b/src/components/workflow/documents-panel/branch-status-panel.tsx
new file mode 100644
index 0000000..b95d5cf
--- /dev/null
+++ b/src/components/workflow/documents-panel/branch-status-panel.tsx
@@ -0,0 +1,33 @@
+"use client";
+
+import { Badge } from "@/components/ui/badge";
+import { Button } from "@/components/ui/button";
+import type { PackRecord } from "@/lib/library-store/types";
+
+interface BranchStatusPanelProps {
+ pack: PackRecord;
+ hasPendingMerge: boolean;
+ onMergeBase: () => void;
+ onResolveConflicts?: () => void;
+}
+
+export function BranchStatusPanel({ pack, hasPendingMerge, onMergeBase, onResolveConflicts }: BranchStatusPanelProps) {
+ const isFork = pack.basePackId !== null;
+ if (!isFork) return null;
+ return (
+
+
+ forked
+ {pack.basePackId && (
+ base: {pack.basePackId.slice(0, 8)}
+ )}
+
+
+ Merge latest base
+ {hasPendingMerge && onResolveConflicts && (
+ Resolve conflicts
+ )}
+
+
+ );
+}
diff --git a/src/components/workflow/documents-panel/conflict-resolve-dialog.tsx b/src/components/workflow/documents-panel/conflict-resolve-dialog.tsx
new file mode 100644
index 0000000..a1e3545
--- /dev/null
+++ b/src/components/workflow/documents-panel/conflict-resolve-dialog.tsx
@@ -0,0 +1,67 @@
+"use client";
+
+import { useMemo, useState } from "react";
+import { Dialog, DialogContent, DialogFooter, DialogHeader, DialogTitle } from "@/components/ui/dialog";
+import { Button } from "@/components/ui/button";
+import { Textarea } from "@/components/ui/textarea";
+import type { ConflictRecord } from "@/lib/library-store/types";
+
+interface ConflictResolveDialogProps {
+ open: boolean;
+ onOpenChange: (open: boolean) => void;
+ conflicts: ConflictRecord[];
+ onResolve: (resolved: Record<string, string>) => void;
+}
+
+export function ConflictResolveDialog({ open, onOpenChange, conflicts, onResolve }: ConflictResolveDialogProps) {
+ const initial = useMemo(() => {
+ const init: Record<string, string> = {};
+ for (const c of conflicts) init[c.docId] = c.branchContent ?? "";
+ return init;
+ }, [conflicts]);
+ const [overrides, setOverrides] = useState<Record<string, string>>({});
+ const resolutions = { ...initial, ...overrides };
+ const setResolutions = (next: Record<string, string>) => setOverrides(next);
+
+ return (
+
+
+
+ Resolve Merge Conflicts
+
+
+ {conflicts.map((conflict) => (
+
+
doc: {conflict.docId}
+
+
+
Ancestor
+
{conflict.ancestorContent}
+
+
+
Base
+
{conflict.baseContent}
+
+
+
Branch
+
{conflict.branchContent}
+
+
+
+ ))}
+
+
+ onOpenChange(false)}>Cancel
+ { onResolve(resolutions); onOpenChange(false); }}>Submit
+
+
+
+ );
+}
diff --git a/src/components/workflow/documents-panel/constants.ts b/src/components/workflow/documents-panel/constants.ts
new file mode 100644
index 0000000..e2f5189
--- /dev/null
+++ b/src/components/workflow/documents-panel/constants.ts
@@ -0,0 +1,14 @@
+import { buildWorkflowPanelShellClass, WORKFLOW_PANEL_SURFACE_CLASS } from "../panel-primitives";
+
+export const DOCUMENTS_PANEL_SHELL_CLASS = buildWorkflowPanelShellClass("top-4 right-4");
+export const DOCUMENTS_PANEL_SURFACE_CLASS = WORKFLOW_PANEL_SURFACE_CLASS;
+export const DOCUMENT_ROLES = [
+ "skill-entrypoint",
+ "reference",
+ "doc",
+ "rule",
+ "template",
+ "example",
+ "asset",
+ "script",
+] as const;
diff --git a/src/components/workflow/documents-panel/doc-editor.tsx b/src/components/workflow/documents-panel/doc-editor.tsx
new file mode 100644
index 0000000..2e1c84e
--- /dev/null
+++ b/src/components/workflow/documents-panel/doc-editor.tsx
@@ -0,0 +1,59 @@
+"use client";
+
+import { useEffect } from "react";
+import { MarkdownEditor } from "@/components/ui/markdown-editor";
+import { Button } from "@/components/ui/button";
+import { openLibraryDocRoom } from "@/lib/collaboration/lib-doc-collab";
+import type { LibraryDocumentRecord } from "@/lib/library-store/types";
+import type { LibraryScope } from "@/types/library";
+
+interface DocEditorProps {
+ workspaceId: string | null;
+ scope: LibraryScope;
+ packId: string;
+ document: LibraryDocumentRecord;
+ value: string;
+ onChange: (value: string) => void;
+ onSave: () => void;
+ saving?: boolean;
+}
+
+export function DocEditor({ workspaceId, scope, packId, document, value, onChange, onSave, saving }: DocEditorProps) {
+ useEffect(() => {
+ if (!workspaceId) return;
+ const room = openLibraryDocRoom({
+ workspaceId,
+ scope,
+ packId,
+ docId: document.id,
+ initialContent: value,
+ });
+ const handler = () => {
+ const text = room.yText.toString();
+ if (text && text !== value) {
+ onChange(text);
+ }
+ };
+ room.yText.observe(handler);
+ return () => {
+ room.yText.unobserve(handler);
+ };
+ }, [workspaceId, scope, packId, document.id]); // eslint-disable-line react-hooks/exhaustive-deps
+
+ return (
+
+
+
{document.path}
+
+ v{document.currentVersionId.slice(0, 6)}
+
+ {saving ? "Saving…" : "Save snapshot"}
+
+
+
+
+
+
+
+ );
+}
diff --git a/src/components/workflow/documents-panel/file-tree.tsx b/src/components/workflow/documents-panel/file-tree.tsx
new file mode 100644
index 0000000..bb0c4a6
--- /dev/null
+++ b/src/components/workflow/documents-panel/file-tree.tsx
@@ -0,0 +1,103 @@
+"use client";
+
+import { useMemo } from "react";
+import { ChevronRight, File, FileText, Wrench, BookOpen, FileWarning, Code, Image } from "lucide-react";
+import type { LibraryDocumentRecord, DocumentRole } from "@/lib/library-store/types";
+
+const ROLE_ICONS: Record<DocumentRole, React.ComponentType<{ className?: string }>> = {
+ "skill-entrypoint": Wrench,
+ reference: BookOpen,
+ doc: FileText,
+ rule: FileWarning,
+ template: File,
+ example: File,
+ asset: Image,
+ script: Code,
+ manifest: File,
+};
+
+const ROLE_LABELS: Record<DocumentRole, string> = {
+ "skill-entrypoint": "SKILL.md",
+ reference: "References",
+ doc: "Docs",
+ rule: "Rules",
+ template: "Templates",
+ example: "Examples",
+ asset: "Assets",
+ script: "Scripts",
+ manifest: "Manifests",
+};
+
+interface FileTreeProps {
+ documents: LibraryDocumentRecord[];
+ selectedDocId: string | null;
+ onSelect: (docId: string) => void;
+ onCreate?: (role: DocumentRole) => void;
+ onDelete?: (docId: string) => void;
+}
+
+export function FileTree({ documents, selectedDocId, onSelect, onCreate, onDelete }: FileTreeProps) {
+ const grouped = useMemo(() => {
+ const map = new Map<DocumentRole, LibraryDocumentRecord[]>();
+ for (const doc of documents.filter((d) => d.deletedAt === null)) {
+ const list = map.get(doc.role) ?? [];
+ list.push(doc);
+ map.set(doc.role, list);
+ }
+ return map;
+ }, [documents]);
+
+ return (
+
+ {(Object.keys(ROLE_LABELS) as DocumentRole[]).map((role) => {
+ const items = grouped.get(role) ?? [];
+ const Icon = ROLE_ICONS[role];
+ return (
+
+
+
+ {ROLE_LABELS[role]}
+
+ {onCreate && (
+ onCreate(role)} className="text-cyan-400 hover:text-cyan-300 text-[10px]">
+ + add
+
+ )}
+
+ {items.length === 0 ? (
+
empty
+ ) : (
+
+ {items.map((doc) => (
+
+ onSelect(doc.id)}
+ className={`w-full text-left px-2 py-1 rounded text-xs flex items-center gap-1.5 group ${selectedDocId === doc.id ? "bg-cyan-950/60 text-cyan-100" : "text-zinc-300 hover:bg-zinc-800/50"}`}
+ >
+
+ {doc.path}
+ {onDelete && (
+ {
+ e.stopPropagation();
+ onDelete(doc.id);
+ }}
+ className="ml-auto opacity-0 group-hover:opacity-100 text-[10px] text-zinc-500 hover:text-red-400 px-1"
+ >
+ ×
+
+ )}
+
+
+ ))}
+
+ )}
+
+ );
+ })}
+
+ );
+}
diff --git a/src/components/workflow/documents-panel/import-dialog.tsx b/src/components/workflow/documents-panel/import-dialog.tsx
new file mode 100644
index 0000000..10db6af
--- /dev/null
+++ b/src/components/workflow/documents-panel/import-dialog.tsx
@@ -0,0 +1,52 @@
+"use client";
+
+import { useRef, useState } from "react";
+import { Dialog, DialogContent, DialogFooter, DialogHeader, DialogTitle } from "@/components/ui/dialog";
+import { Button } from "@/components/ui/button";
+
+interface ImportDialogProps {
+ open: boolean;
+ onOpenChange: (open: boolean) => void;
+ onImport: (file: File) => Promise<void>;
+}
+
+export function ImportDialog({ open, onOpenChange, onImport }: ImportDialogProps) {
+ const inputRef = useRef<HTMLInputElement>(null);
+ const [importing, setImporting] = useState(false);
+
+ const handleFile = async (file: File) => {
+ setImporting(true);
+ try {
+ await onImport(file);
+ onOpenChange(false);
+ } finally {
+ setImporting(false);
+ }
+ };
+
+ return (
+
+
+
+ Import .nexus archive
+
+
+
Select a .nexus archive or Agent Skills zip to import into the current scope.
+
{
+ const file = e.target.files?.[0];
+ if (file) void handleFile(file);
+ }}
+ className="block w-full text-sm text-zinc-300 file:mr-4 file:rounded-md file:border-0 file:bg-cyan-700 file:px-3 file:py-1.5 file:text-white"
+ />
+
+
+ onOpenChange(false)} disabled={importing}>Close
+
+
+
+ );
+}
diff --git a/src/components/workflow/documents-panel/index.ts b/src/components/workflow/documents-panel/index.ts
new file mode 100644
index 0000000..3c3841d
--- /dev/null
+++ b/src/components/workflow/documents-panel/index.ts
@@ -0,0 +1,3 @@
+export { default as DocumentsPanel } from "./panel";
+export { useDocumentsPanelController } from "./use-documents-panel-controller";
+export type { DocumentsPanelProps } from "./types";
diff --git a/src/components/workflow/documents-panel/markdown-preview.tsx b/src/components/workflow/documents-panel/markdown-preview.tsx
new file mode 100644
index 0000000..e89811e
--- /dev/null
+++ b/src/components/workflow/documents-panel/markdown-preview.tsx
@@ -0,0 +1,18 @@
+"use client";
+
+import dynamic from "next/dynamic";
+import "@uiw/react-markdown-preview/markdown.css";
+
+const MDPreview = dynamic(() => import("@uiw/react-md-editor").then((mod) => mod.default.Markdown as React.ComponentType<{ source: string }>), { ssr: false });
+
+interface MarkdownPreviewProps {
+ source: string;
+}
+
+export function MarkdownPreview({ source }: MarkdownPreviewProps) {
+ return (
+
+
+
+ );
+}
diff --git a/src/components/workflow/documents-panel/pack-browser.tsx b/src/components/workflow/documents-panel/pack-browser.tsx
new file mode 100644
index 0000000..31a4f17
--- /dev/null
+++ b/src/components/workflow/documents-panel/pack-browser.tsx
@@ -0,0 +1,133 @@
+"use client";
+
+import { useState } from "react";
+import { Button } from "@/components/ui/button";
+import { Input } from "@/components/ui/input";
+import { Badge } from "@/components/ui/badge";
+import { GitFork, Plus, Search, Trash2 } from "lucide-react";
+import type { PackRecord } from "@/lib/library-store/types";
+
+interface PackBrowserProps {
+ packs: PackRecord[];
+ selectedPackId: string | null;
+ onSelectPack: (packId: string) => void;
+ onCreatePack: (packKey: string, name: string) => Promise<void>;
+ onForkPack: (packId: string) => Promise<void>;
+ onDeletePack: (packId: string) => Promise<void>;
+}
+
+export function PackBrowser({ packs, selectedPackId, onSelectPack, onCreatePack, onForkPack, onDeletePack }: PackBrowserProps) {
+ const [query, setQuery] = useState("");
+ const [creating, setCreating] = useState(false);
+ const [newPackKey, setNewPackKey] = useState("");
+ const [newPackName, setNewPackName] = useState("");
+
+ const filtered = packs.filter((p) => {
+ if (!query.trim()) return true;
+ const q = query.toLowerCase();
+ return p.name.toLowerCase().includes(q) || p.packKey.toLowerCase().includes(q) || p.tags.some((t) => t.toLowerCase().includes(q));
+ });
+
+ return (
+
+
+
+
+ setQuery(e.target.value)}
+ className="pl-7 bg-zinc-900 border-zinc-800 h-8 text-sm"
+ />
+
+
setCreating((c) => !c)} className="h-8">
+ New
+
+
+ {creating && (
+
+ )}
+
+ {filtered.length === 0 ? (
+ No packs in this library.
+ ) : (
+ filtered.map((pack) => (
+
+ onSelectPack(pack.id)}
+ >
+
+
+
{pack.name}
+
{pack.packKey}
+ {pack.basePackId && (
+
+ forked
+
+ )}
+
+
+ {!pack.basePackId && (
+ {
+ e.stopPropagation();
+ void onForkPack(pack.id);
+ }}
+ className="p-1 rounded hover:bg-zinc-800 text-zinc-400 hover:text-cyan-300"
+ title="Fork to user library"
+ >
+
+
+ )}
+ {
+ e.stopPropagation();
+ void onDeletePack(pack.id);
+ }}
+ className="p-1 rounded hover:bg-zinc-800 text-zinc-400 hover:text-red-400"
+ title="Soft-delete"
+ >
+
+
+
+
+
+
+ ))
+ )}
+
+
+ );
+}
diff --git a/src/components/workflow/documents-panel/pack-detail.tsx b/src/components/workflow/documents-panel/pack-detail.tsx
new file mode 100644
index 0000000..0c2cf0f
--- /dev/null
+++ b/src/components/workflow/documents-panel/pack-detail.tsx
@@ -0,0 +1,212 @@
+"use client";
+
+import { useState } from "react";
+import { Button } from "@/components/ui/button";
+import { Input } from "@/components/ui/input";
+import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
+import { FileTree } from "./file-tree";
+import { DocEditor } from "./doc-editor";
+import { MarkdownPreview } from "./markdown-preview";
+import { SkillDetailPanel } from "./skill-detail-panel";
+import { PublishPanel } from "./publish-panel";
+import { BranchStatusPanel } from "./branch-status-panel";
+import { ConflictResolveDialog } from "./conflict-resolve-dialog";
+import type {
+ ConflictRecord,
+ LibraryDocumentRecord,
+ MergeRecord,
+ PackRecord,
+ PackVersionRecord,
+ SkillRecord,
+ ValidationWarning,
+ DocumentRole,
+} from "@/lib/library-store/types";
+import type { LibraryScope } from "@/types/library";
+
+interface PackDetailProps {
+ workspaceId: string | null;
+ scope: LibraryScope;
+ pack: PackRecord;
+ documents: LibraryDocumentRecord[];
+ skills: SkillRecord[];
+ packVersions: PackVersionRecord[];
+ selectedDocument: LibraryDocumentRecord | null;
+ selectedDocId: string | null;
+ draftContent: string;
+ setDraftContent: (value: string) => void;
+ saving: boolean;
+ validationWarnings: ValidationWarning[];
+ pendingMerge?: MergeRecord;
+ conflicts: ConflictRecord[];
+ onSelectDocument: (docId: string) => void;
+ onCreateDocument: (role: DocumentRole, path: string, content: string) => void;
+ onSaveDocument: () => void;
+ onCreateSkill: (skillKey: string, name: string, description: string, entrypointDocId: string) => void;
+ onPublishPack: (version: string, notes?: string) => void;
+ onPublishSkill: (skillId: string) => void;
+ onMergeBase: () => void;
+ onResolveConflicts: (resolved: Record) => void;
+ onValidate: () => void;
+ onDeleteDocument: (docId: string) => void;
+ onDeleteSkill: (skillId: string) => void;
+}
+
+export function PackDetail(props: PackDetailProps) {
+ const [creatingDoc, setCreatingDoc] = useState(null);
+ const [docPath, setDocPath] = useState("");
+ const [creatingSkill, setCreatingSkill] = useState(false);
+ const [skillKey, setSkillKey] = useState("");
+ const [skillName, setSkillName] = useState("");
+ const [skillDescription, setSkillDescription] = useState("");
+ const [skillEntrypointId, setSkillEntrypointId] = useState("");
+ const [conflictDialogOpen, setConflictDialogOpen] = useState(false);
+
+ const skillEntrypoints = props.documents.filter((d) => d.role === "skill-entrypoint" && d.deletedAt === null);
+
+ return (
+
+
+
+ {props.selectedDocument ? (
+
+
+ Edit
+ Preview
+
+
+
+
+
+
+
+
+ ) : (
+
+ Select a document to edit
+
+ )}
+
+
+
props.onPublishSkill(skillId)}
+ onDeleteSkill={props.onDeleteSkill}
+ />
+
+
+
+
+ );
+}
diff --git a/src/components/workflow/documents-panel/panel.tsx b/src/components/workflow/documents-panel/panel.tsx
new file mode 100644
index 0000000..6d642de
--- /dev/null
+++ b/src/components/workflow/documents-panel/panel.tsx
@@ -0,0 +1,136 @@
+"use client";
+
+import { useState } from "react";
+import { Button } from "@/components/ui/button";
+import { Tabs, TabsList, TabsTrigger } from "@/components/ui/tabs";
+import { Library, Upload, X } from "lucide-react";
+import { useDocumentsPanelController } from "./use-documents-panel-controller";
+import { PackBrowser } from "./pack-browser";
+import { PackDetail } from "./pack-detail";
+import { ImportDialog } from "./import-dialog";
+import { useLibraryDocsStore } from "@/store/library-docs";
+import type { ValidationWarning } from "@/types/library";
+import {
+ DOCUMENTS_PANEL_SHELL_CLASS,
+ DOCUMENTS_PANEL_SURFACE_CLASS,
+} from "./constants";
+import type { DocumentsPanelProps } from "./types";
+
+const EMPTY_WARNINGS: ValidationWarning[] = [];
+
+export default function DocumentsPanel({ open, onClose }: DocumentsPanelProps) {
+ const c = useDocumentsPanelController(open);
+ const selectedPackId = c.selectedPack?.id;
+ const validationWarnings = useLibraryDocsStore(
+ (s) => (selectedPackId ? s.validationWarnings[selectedPackId] : undefined) ?? EMPTY_WARNINGS,
+ );
+ const [importOpen, setImportOpen] = useState(false);
+
+ return (
+ <>
+
+
+
+
+
+ Documents Skill Library
+
+
+ setImportOpen(true)}>
+ Import
+
+
+
+
+
+
+
+
+
c.setScope(v as "workspace" | "user")}>
+
+ Workspace
+ User-local
+
+
+
{
+ await useLibraryDocsStore.getState().softDeletePack(id);
+ }}
+ />
+
+
+ {c.selectedPack && c.workspaceId ? (
+
{
+ const v = window.prompt("Skill version (semver)");
+ if (v) c.publishSkill(skillId, v);
+ }}
+ onMergeBase={c.mergeBase}
+ onResolveConflicts={async (resolved) => {
+ if (!c.selectedPack) return;
+ const merge = c.pendingMerges[c.selectedPack.id];
+ if (!merge) return;
+ await useLibraryDocsStore.getState().resolveConflict(c.selectedPack.id, merge.id, resolved);
+ }}
+ onValidate={c.validatePack}
+ onDeleteDocument={async (docId) => {
+ if (!c.selectedPack) return;
+ await useLibraryDocsStore.getState().deleteDocument(c.selectedPack.id, docId);
+ }}
+ onDeleteSkill={async (skillId) => {
+ if (!c.selectedPack) return;
+ await useLibraryDocsStore.getState().deleteSkill(c.selectedPack.id, skillId);
+ }}
+ />
+ ) : (
+
+ Select or create a pack to begin
+
+ )}
+
+
+
+
+
+ >
+ );
+}
diff --git a/src/components/workflow/documents-panel/publish-panel.tsx b/src/components/workflow/documents-panel/publish-panel.tsx
new file mode 100644
index 0000000..7889802
--- /dev/null
+++ b/src/components/workflow/documents-panel/publish-panel.tsx
@@ -0,0 +1,68 @@
+"use client";
+
+import { useState } from "react";
+import { Button } from "@/components/ui/button";
+import { Input } from "@/components/ui/input";
+import { Label } from "@/components/ui/label";
+import type { PackVersionRecord } from "@/lib/library-store/types";
+
+interface PublishPanelProps {
+ packVersions: PackVersionRecord[];
+ onPublishPack: (version: string, notes: string) => void;
+}
+
+export function PublishPanel({ packVersions, onPublishPack }: PublishPanelProps) {
+ const [version, setVersion] = useState("");
+ const [notes, setNotes] = useState("");
+
+ return (
+
+
Publish Pack
+
+ Version (semver)
+ setVersion(e.target.value)}
+ placeholder="1.0.0"
+ className="bg-zinc-900 border-zinc-800 mt-1"
+ />
+
+
+ Notes
+ setNotes(e.target.value)}
+ className="bg-zinc-900 border-zinc-800 mt-1"
+ />
+
+
{
+ onPublishPack(version, notes);
+ setVersion("");
+ setNotes("");
+ }}
+ disabled={!version}
+ >
+ Publish version
+
+
+
Published versions
+ {packVersions.length === 0 ? (
+
No published versions.
+ ) : (
+
+ {packVersions.map((pv) => (
+
+ {pv.version} {new Date(pv.createdAt).toLocaleString()}
+ {pv.deprecated && deprecated }
+
+ ))}
+
+ )}
+
+
+ );
+}
diff --git a/src/components/workflow/documents-panel/skill-detail-panel.tsx b/src/components/workflow/documents-panel/skill-detail-panel.tsx
new file mode 100644
index 0000000..94c538f
--- /dev/null
+++ b/src/components/workflow/documents-panel/skill-detail-panel.tsx
@@ -0,0 +1,63 @@
+"use client";
+
+import { Badge } from "@/components/ui/badge";
+import { Button } from "@/components/ui/button";
+import type { SkillRecord, ValidationWarning } from "@/lib/library-store/types";
+
+interface SkillDetailPanelProps {
+ skills: SkillRecord[];
+ validationWarnings: ValidationWarning[];
+ onPublishSkill: (skillId: string) => void;
+ onDeleteSkill: (skillId: string) => void;
+}
+
+export function SkillDetailPanel({ skills, validationWarnings, onPublishSkill, onDeleteSkill }: SkillDetailPanelProps) {
+ return (
+
+
+
Skills
+ {skills.length === 0 ? (
+
No skills yet.
+ ) : (
+
+ {skills.map((skill) => (
+
+
+
+
{skill.name}
+
{skill.skillKey}
+
+
+ onPublishSkill(skill.id)}>
+ Publish
+
+ onDeleteSkill(skill.id)}>
+ Delete
+
+
+
+ {skill.description && {skill.description}
}
+ {skill.deprecated && deprecated }
+
+ ))}
+
+ )}
+
+
+
Validation
+ {validationWarnings.length === 0 ? (
+
All clean.
+ ) : (
+
+ {validationWarnings.map((w, i) => (
+
+ {w.code}
+ {w.message}
+
+ ))}
+
+ )}
+
+
+ );
+}
diff --git a/src/components/workflow/documents-panel/types.ts b/src/components/workflow/documents-panel/types.ts
new file mode 100644
index 0000000..4c2b1bf
--- /dev/null
+++ b/src/components/workflow/documents-panel/types.ts
@@ -0,0 +1,10 @@
+import type { LibraryScope } from "@/types/library";
+
/** Props accepted by the DocumentsPanel component. */
export interface DocumentsPanelProps {
  // Whether the panel is currently visible.
  open: boolean;
  // Invoked when the user dismisses the panel.
  onClose: () => void;
}

/** Scope-selection state owned by the documents-panel controller hook. */
export interface DocumentsPanelControllerState {
  // Which library the panel is browsing: workspace-shared or user-local.
  scope: LibraryScope;
}
diff --git a/src/components/workflow/documents-panel/use-documents-panel-controller.ts b/src/components/workflow/documents-panel/use-documents-panel-controller.ts
new file mode 100644
index 0000000..52ff7e4
--- /dev/null
+++ b/src/components/workflow/documents-panel/use-documents-panel-controller.ts
@@ -0,0 +1,235 @@
+"use client";
+
+import { useCallback, useEffect, useMemo, useState } from "react";
+import { useShallow } from "zustand/react/shallow";
+import { toast } from "sonner";
+import { useLibraryDocsStore } from "@/store/library-docs";
+import type { LibraryScope } from "@/types/library";
+import type { LibraryDocumentRecord } from "@/lib/library-store/types";
+
+export function useDocumentsPanelController(open: boolean) {
+ const store = useLibraryDocsStore(
+ useShallow((s) => ({
+ workspaceId: s.workspaceId,
+ workspacePacks: s.workspacePacks,
+ userPacks: s.userPacks,
+ selectedPackId: s.selectedPackId,
+ documents: s.documents,
+ skills: s.skills,
+ packVersions: s.packVersions,
+ pendingMerges: s.pendingMerges,
+ conflicts: s.conflicts,
+ loading: s.loading,
+ saving: s.saving,
+ bootstrap: s.bootstrap,
+ selectPack: s.selectPack,
+ loadPackDetail: s.loadPackDetail,
+ loadDocumentContent: s.loadDocumentContent,
+ createPack: s.createPack,
+ createDocument: s.createDocument,
+ saveDocument: s.saveDocument,
+ createSkill: s.createSkill,
+ publishPack: s.publishPack,
+ publishSkill: s.publishSkill,
+ forkPack: s.forkPack,
+ mergeBase: s.mergeBase,
+ validatePackById: s.validatePackById,
+ importArchive: s.importArchive,
+ })),
+ );
+ const [scope, setScope] = useState("workspace");
+ const [selectedDocId, setSelectedDocId] = useState(null);
+ const [draftContent, setDraftContent] = useState("");
+
+ const bootstrap = store.bootstrap;
+ useEffect(() => {
+ if (!open) return;
+ void bootstrap();
+ }, [open, bootstrap]);
+
+ const packs = scope === "workspace" ? store.workspacePacks : store.userPacks;
+ const selectedPack = packs.find((p) => p.id === store.selectedPackId) ?? null;
+ const documents = useMemo(
+ () => (store.selectedPackId ? store.documents[store.selectedPackId] ?? [] : []),
+ [store.selectedPackId, store.documents],
+ );
+ const skills = store.selectedPackId ? store.skills[store.selectedPackId] ?? [] : [];
+ const packVersions = store.selectedPackId ? store.packVersions[store.selectedPackId] ?? [] : [];
+ const selectedDocument = documents.find((d) => d.id === selectedDocId) ?? null;
+
+ const selectPack = useCallback(
+ async (packId: string) => {
+ store.selectPack(packId);
+ await store.loadPackDetail(packId);
+ setSelectedDocId(null);
+ },
+ [store],
+ );
+
+ const selectDocument = useCallback(
+ async (docId: string) => {
+ setSelectedDocId(docId);
+ const document = documents.find((d) => d.id === docId);
+ if (!document || !store.selectedPackId) return;
+ const content = await store.loadDocumentContent(store.selectedPackId, docId, document.currentVersionId);
+ setDraftContent(content);
+ },
+ [documents, store],
+ );
+
+ const createPack = useCallback(
+ async (packKey: string, name: string) => {
+ try {
+ await store.createPack(scope, packKey, name);
+ toast.success(`Pack "${name}" created`);
+ } catch (err) {
+ toast.error((err as Error).message);
+ }
+ },
+ [scope, store],
+ );
+
+ const createDocument = useCallback(
+ async (role: LibraryDocumentRecord["role"], path: string, content: string) => {
+ if (!store.selectedPackId) return;
+ try {
+ await store.createDocument(store.selectedPackId, { role, path, content });
+ toast.success(`Document "${path}" created`);
+ } catch (err) {
+ toast.error((err as Error).message);
+ }
+ },
+ [store],
+ );
+
+ const saveDocument = useCallback(async () => {
+ if (!store.selectedPackId || !selectedDocument) return;
+ try {
+ await store.saveDocument(
+ store.selectedPackId,
+ selectedDocument.id,
+ draftContent,
+ selectedDocument.currentVersionId,
+ );
+ toast.success("Document saved");
+ } catch (err) {
+ toast.error((err as Error).message);
+ }
+ }, [draftContent, selectedDocument, store]);
+
+ const createSkill = useCallback(
+ async (skillKey: string, name: string, description: string, entrypointDocId: string) => {
+ if (!store.selectedPackId) return;
+ try {
+ await store.createSkill(store.selectedPackId, { skillKey, name, description, entrypointDocId });
+ toast.success(`Skill "${name}" created`);
+ } catch (err) {
+ toast.error((err as Error).message);
+ }
+ },
+ [store],
+ );
+
+ const publishPack = useCallback(
+ async (version: string, notes?: string) => {
+ if (!store.selectedPackId) return;
+ try {
+ await store.publishPack(store.selectedPackId, version, notes);
+ toast.success(`Pack version ${version} published`);
+ } catch (err) {
+ toast.error((err as Error).message);
+ }
+ },
+ [store],
+ );
+
+ const publishSkill = useCallback(
+ async (skillId: string, version: string, notes?: string) => {
+ if (!store.selectedPackId) return;
+ try {
+ await store.publishSkill(store.selectedPackId, skillId, version, notes);
+ toast.success(`Skill version ${version} published`);
+ } catch (err) {
+ toast.error((err as Error).message);
+ }
+ },
+ [store],
+ );
+
+ const forkPack = useCallback(
+ async (packId: string) => {
+ try {
+ await store.forkPack(packId, "user");
+ toast.success("Pack forked into user-local library");
+ } catch (err) {
+ toast.error((err as Error).message);
+ }
+ },
+ [store],
+ );
+
+ const mergeBase = useCallback(async () => {
+ if (!store.selectedPackId) return;
+ try {
+ const merge = await store.mergeBase(store.selectedPackId);
+ if (merge.status === "clean") toast.success("Merge complete (no conflicts)");
+ else toast.warning(`Merge produced ${merge.conflictDocs.length} conflict(s)`);
+ } catch (err) {
+ toast.error((err as Error).message);
+ }
+ }, [store]);
+
+ const validatePack = useCallback(async () => {
+ if (!store.selectedPackId) return;
+ try {
+ const warnings = await store.validatePackById(store.selectedPackId);
+ if (warnings.length === 0) toast.success("Pack valid");
+ else toast.message(`${warnings.length} validation issue(s)`);
+ } catch (err) {
+ toast.error((err as Error).message);
+ }
+ }, [store]);
+
+ const importArchive = useCallback(
+ async (file: File) => {
+ try {
+ await store.importArchive(file, scope);
+ toast.success("Archive imported");
+ } catch (err) {
+ toast.error((err as Error).message);
+ }
+ },
+ [scope, store],
+ );
+
+ return {
+ scope,
+ setScope,
+ packs,
+ selectedPack,
+ documents,
+ skills,
+ packVersions,
+ selectedDocument,
+ selectedDocId,
+ draftContent,
+ setDraftContent,
+ loading: store.loading,
+ saving: store.saving,
+ selectPack,
+ selectDocument,
+ createPack,
+ createDocument,
+ saveDocument,
+ createSkill,
+ publishPack,
+ publishSkill,
+ forkPack,
+ mergeBase,
+ validatePack,
+ importArchive,
+ workspaceId: store.workspaceId,
+ pendingMerges: store.pendingMerges,
+ conflicts: store.conflicts,
+ };
+}
diff --git a/src/components/workflow/generated-export-dialog.tsx b/src/components/workflow/generated-export-dialog.tsx
index b942a7c..43285a7 100644
--- a/src/components/workflow/generated-export-dialog.tsx
+++ b/src/components/workflow/generated-export-dialog.tsx
@@ -41,6 +41,8 @@ import {
pickExportDirectory,
supportsDirectoryExport,
} from "@/lib/generated-workflow-export";
+import { exportNexusArchive } from "@/lib/library-client";
+import { Package } from "lucide-react";
import { IS_MAC } from "@/lib/platform";
import { useOpenCodeStore } from "@/store/opencode";
import type { WorkflowJSON } from "@/types/workflow";
@@ -117,6 +119,30 @@ export default function GeneratedExportDialog({
}
};
// Build a portable .nexus archive for the current workflow and trigger a
// browser download via a temporary object URL + synthetic anchor click.
const handleNexusArchive = async () => {
  setIsBusy(true);
  try {
    const workflow = getWorkflow();
    // exportNexusArchive bundles the workflow plus referenced library content
    // into a single Blob (see src/lib/library-client).
    const blob = await exportNexusArchive(workflow, workflow.name || "workflow");
    const url = URL.createObjectURL(blob);
    const a = document.createElement("a");
    // Sanitize the filename: any char outside [a-zA-Z0-9_-] becomes "-".
    const safeName = (workflow.name || "workflow").replace(/[^a-zA-Z0-9_\-]/g, "-").toLowerCase();
    a.href = url;
    a.download = `${safeName}.nexus`;
    document.body.appendChild(a);
    a.click();
    document.body.removeChild(a);
    // Release the blob URL once the download has been initiated.
    URL.revokeObjectURL(url);
    toast.success("Downloaded .nexus archive");
    onOpenChange(false);
  } catch (error) {
    console.error(error);
    toast.error(error instanceof Error ? error.message : "Failed to build .nexus archive");
  } finally {
    // Always clear the busy flag, success or failure.
    setIsBusy(false);
  }
};
+
const handleZipDownload = async () => {
setIsBusy(true);
try {
@@ -330,6 +356,17 @@ export default function GeneratedExportDialog({
Export as a ZIP or write directly into {selectedTarget.rootDir}.
+
+ {isBusy ? : }
+ Download .nexus archive
+
+
+
+ window.dispatchEvent(new CustomEvent("nexus:toggle-documents-panel"))}
+ className={`h-8 rounded-lg px-2.5 text-xs ${TEXT_MUTED} hover:bg-zinc-800/80 hover:text-cyan-300`}
+ >
+
+ Library
+
+
+
+ Documents Skill Library
+
+
void;
+ onSelect: (ref: SkillRef & { packKey?: string; skillKey?: string; skillName?: string }) => void;
+ currentRef?: SkillRef | null;
+}
+
+export function SkillPickerDialog({ open, onOpenChange, onSelect, currentRef }: SkillPickerDialogProps) {
+ const {
+ bootstrap,
+ bootstrapped,
+ workspacePacks,
+ userPacks,
+ skills,
+ packVersions,
+ loadPackDetail,
+ } = useLibraryDocsStore();
+
+ const [activeScope, setActiveScope] = useState(currentRef?.scope ?? "workspace");
+ const [selectedPack, setSelectedPack] = useState(null);
+ const [selectedSkill, setSelectedSkill] = useState(null);
+ const [selectedVersion, setSelectedVersion] = useState("draft");
+
+ useEffect(() => {
+ if (open && !bootstrapped) {
+ void bootstrap();
+ }
+ }, [open, bootstrapped, bootstrap]);
+
+ useEffect(() => {
+ if (selectedPack) {
+ void loadPackDetail(selectedPack.id);
+ }
+ }, [selectedPack, loadPackDetail]);
+
+ const packs = activeScope === "workspace" ? workspacePacks : userPacks;
+ const currentPackSkills = useMemo(() => (selectedPack ? skills[selectedPack.id] ?? [] : []), [selectedPack, skills]);
+ const currentPackVersions = useMemo(() => (selectedPack ? packVersions[selectedPack.id] ?? [] : []), [selectedPack, packVersions]);
+
+ const handleConfirm = () => {
+ if (!selectedPack || !selectedSkill) return;
+ onSelect({
+ scope: activeScope,
+ packId: selectedPack.id,
+ packKey: selectedPack.packKey,
+ packVersion: selectedVersion,
+ skillId: selectedSkill.id,
+ skillKey: selectedSkill.skillKey,
+ skillName: selectedSkill.name,
+ });
+ onOpenChange(false);
+ };
+
+ return (
+
+
+
+ Link Library Skill
+
+ setActiveScope(v as LibraryScope)}>
+
+ Workspace
+ User-local
+
+
+
+
+
Pack
+
+
+ {packs.length === 0 && (
+
No packs in {activeScope} library.
+ )}
+ {packs.map((pack) => (
+
{
+ setSelectedPack(pack);
+ setSelectedSkill(null);
+ setSelectedVersion("draft");
+ }}
+ className={`w-full text-left px-2 py-1.5 rounded text-sm hover:bg-zinc-800/50 ${selectedPack?.id === pack.id ? "bg-zinc-800/80 text-zinc-100" : "text-zinc-300"}`}
+ >
+ {pack.name}
+ {pack.packKey}
+
+ ))}
+
+
+
+
+
Skill
+
+
+ {!selectedPack && (
+
Select a pack.
+ )}
+ {selectedPack && currentPackSkills.length === 0 && (
+
No skills in this pack.
+ )}
+ {currentPackSkills.map((skill) => (
+
setSelectedSkill(skill)}
+ className={`w-full text-left px-2 py-1.5 rounded text-sm hover:bg-zinc-800/50 ${selectedSkill?.id === skill.id ? "bg-zinc-800/80 text-zinc-100" : "text-zinc-300"} ${skill.deprecated ? "opacity-60" : ""}`}
+ >
+ {skill.name}{skill.deprecated && " (deprecated)"}
+ {skill.skillKey}
+
+ ))}
+
+
+
+
+
+ Version
+ setSelectedVersion(e.target.value)}
+ disabled={!selectedSkill}
+ className="w-full bg-zinc-900 border border-zinc-700 rounded-md px-2 py-1.5 text-sm"
+ >
+ draft (live)
+ {currentPackVersions.map((pv) => (
+ {pv.version}{pv.deprecated ? " (deprecated)" : ""}
+ ))}
+
+
+
+
+
+ onOpenChange(false)}>Cancel
+ Link Skill
+
+
+
+ );
+}
+
+export interface InlineSkillRefDisplayProps {
+ skillRef: SkillRef & { packKey?: string; skillKey?: string; skillName?: string };
+ onDetach: () => void;
+}
+
+export function InlineSkillRefDisplay({ skillRef, onDetach }: InlineSkillRefDisplayProps) {
+ return (
+
+
+
Library reference
+
Detach
+
+
+
scope: {skillRef.scope}
+
pack: {skillRef.packKey ?? skillRef.packId}
+
version: {skillRef.packVersion}
+
skill: {skillRef.skillName ?? skillRef.skillKey ?? skillRef.skillId}
+
+
+ );
+}
+
+interface LibraryRefSectionProps {
+ value: SkillRef | null;
+ onChange: (value: SkillRef | null) => void;
+}
+
+export function LibraryRefSection({ value, onChange }: LibraryRefSectionProps) {
+ const [open, setOpen] = useState(false);
+ return (
+
+ Library Reference
+ {value ? (
+ onChange(null)} />
+ ) : (
+ setOpen(true)}>
+ Link to library skill
+
+ )}
+ {value && (
+ setOpen(true)}>
+ Change library skill
+
+ )}
+ onChange(ref)}
+ currentRef={value}
+ />
+
+ );
+}
+
+interface InputProps extends React.InputHTMLAttributes {
+ className?: string;
+}
+
+export function VersionInput({ className, ...rest }: InputProps) {
+ return ;
+}
diff --git a/src/components/workflow/workflow-editor.tsx b/src/components/workflow/workflow-editor.tsx
index 336b11e..7cfa227 100644
--- a/src/components/workflow/workflow-editor.tsx
+++ b/src/components/workflow/workflow-editor.tsx
@@ -1,6 +1,6 @@
"use client";
-import { useEffect } from "react";
+import { useEffect, useState } from "react";
import { ReactFlowProvider } from "@xyflow/react";
import "@xyflow/react/dist/style.css";
import { useWorkflowStore } from "@/store/workflow";
@@ -19,6 +19,7 @@ import PropertiesPanel from "./properties-panel";
import DeleteDialog from "./delete-dialog";
import LibraryPanel from "./library-panel";
import { BrainPanel } from "./brain-panel";
+import { DocumentsPanel } from "./documents-panel";
import SubWorkflowCanvas from "./sub-workflow-canvas";
import FloatingPromptGen from "./floating-prompt-gen";
import FloatingWorkflowGen from "./floating-workflow-gen";
@@ -53,7 +54,7 @@ export default function WorkflowEditor({
initialWorkflow,
}: WorkflowEditorProps = {}) {
const isWorkspaceMode = Boolean(workspaceId && workflowId);
-
+ const [documentsPanelOpen, setDocumentsPanelOpen] = useState(false);
const closePropertiesPanel = useWorkflowStore((s) => s.closePropertiesPanel);
const getWorkflowJSON = useWorkflowStore((s) => s.getWorkflowJSON);
const loadWorkflow = useWorkflowStore((s) => s.loadWorkflow);
@@ -114,6 +115,13 @@ export default function WorkflowEditor({
return () => window.removeEventListener("nexus:open-sub-workflow", handler);
}, [openSubWorkflow]);
+ // Listen for documents-panel toggle events
+ useEffect(() => {
+ const handler = () => setDocumentsPanelOpen((open) => !open);
+ window.addEventListener("nexus:toggle-documents-panel", handler);
+ return () => window.removeEventListener("nexus:toggle-documents-panel", handler);
+ }, []);
+
// Keyboard shortcuts (global — dialogs are managed by Header)
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
@@ -273,6 +281,7 @@ export default function WorkflowEditor({
+ setDocumentsPanelOpen(false)} />
diff --git a/src/lib/__tests__/library-export.test.ts b/src/lib/__tests__/library-export.test.ts
new file mode 100644
index 0000000..66baaba
--- /dev/null
+++ b/src/lib/__tests__/library-export.test.ts
@@ -0,0 +1,123 @@
+import fs from "node:fs/promises";
+import os from "node:os";
+import path from "node:path";
+import JSZip from "jszip";
+import { afterEach, beforeEach, describe, expect, it } from "bun:test";
+import {
+ getLibraryStore,
+ resetLibraryStoreForTests,
+ resetLibraryConfigCache,
+ buildNexusArchive,
+ computeContentHash,
+ resolveFromArtifact,
+} from "@/lib/library-store";
+
+let tempDir = "";
+
// Integration tests for .nexus archive export: archive structure, hash
// integrity, and offline (artifact-only) skill resolution.
describe("Nexus archive export", () => {
  beforeEach(async () => {
    // Isolate library data in a throwaway temp dir and reset module-level
    // singletons so each test starts from a fresh store/config.
    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "nexus-export-test-"));
    process.env.NEXUS_LIBRARY_DATA_DIR = tempDir;
    process.env.NEXUS_BRAIN_TOKEN_SECRET = "test-secret";
    resetLibraryStoreForTests();
    resetLibraryConfigCache();
  });

  afterEach(async () => {
    resetLibraryStoreForTests();
    resetLibraryConfigCache();
    delete process.env.NEXUS_LIBRARY_DATA_DIR;
    if (tempDir) await fs.rm(tempDir, { recursive: true, force: true });
  });

  // Shared fixture: one workspace pack ("support") containing a single skill
  // ("triage") whose entrypoint document is SKILL.md with YAML frontmatter.
  async function setupPackWithSkill() {
    const store = getLibraryStore();
    const { workspace } = await store.ensureLibraries("ws-x", "user-x");
    const pack = await store.createPack(workspace.id, { packKey: "support", name: "Support" });
    const { document } = await store.createDocument(pack.id, {
      role: "skill-entrypoint",
      path: "SKILL.md",
      content: "---\nname: triage\ndescription: tri\n---\nbody",
    });
    const skill = await store.createSkill(pack.id, { skillKey: "triage", name: "Triage", description: "tri", entrypointDocId: document.id });
    return { pack, skill };
  }

  it("builds a .nexus archive containing workflow + packs + hashes (AC-10)", async () => {
    const { pack, skill } = await setupPackWithSkill();
    // Workflow with a single skill node whose libraryRef points at the
    // fixture skill (draft version).
    const workflowJson = {
      name: "demo",
      nodes: [
        {
          id: "n1",
          type: "skill",
          position: { x: 0, y: 0 },
          data: {
            type: "skill",
            label: "Skill",
            name: "n1",
            skillName: "triage",
            description: "",
            promptText: "",
            detectedVariables: [],
            variableMappings: {},
            metadata: [],
            libraryRef: { scope: "workspace", packId: pack.id, packKey: pack.packKey, packVersion: "draft", skillId: skill.id, skillKey: skill.skillKey },
          },
        },
      ],
      edges: [],
    };
    const { buffer, archiveName } = await buildNexusArchive({ workflowJson, workflowName: "demo" });
    expect(archiveName.endsWith(".nexus")).toBe(true);
    // The archive is a zip with a fixed layout: manifest, workflow, hashes,
    // resolver metadata, and the referenced pack content under libraries/.
    const zip = await JSZip.loadAsync(buffer);
    expect(zip.file("manifest.json")).toBeTruthy();
    expect(zip.file("workflow.json")).toBeTruthy();
    expect(zip.file("hashes.json")).toBeTruthy();
    expect(zip.file("runtime/resolver-metadata.json")).toBeTruthy();
    expect(zip.file(`libraries/workspace/packs/${pack.packKey}/skills/${skill.skillKey}/SKILL.md`)).toBeTruthy();
  });

  it("hash validation round-trip succeeds (AC-10, FR-65)", async () => {
    const { pack, skill } = await setupPackWithSkill();
    const workflowJson = {
      nodes: [
        {
          data: {
            libraryRef: { scope: "workspace", packId: pack.id, packKey: pack.packKey, packVersion: "draft", skillId: skill.id, skillKey: skill.skillKey },
          },
        },
      ],
    };
    const { buffer } = await buildNexusArchive({ workflowJson, workflowName: "demo" });
    const zip = await JSZip.loadAsync(buffer);
    // Every entry listed in hashes.json must recompute to the stored hash.
    const hashes = JSON.parse(await zip.file("hashes.json")!.async("string"));
    for (const [key, expected] of Object.entries(hashes)) {
      const file = zip.file(key);
      expect(file).toBeTruthy();
      const content = await file!.async("string");
      expect(computeContentHash(content)).toBe(expected as string);
    }
  });

  it("resolves from artifact without live library (AC-11)", async () => {
    const { pack, skill } = await setupPackWithSkill();
    const workflowJson = {
      nodes: [{ data: { libraryRef: { scope: "workspace", packId: pack.id, packKey: pack.packKey, packVersion: "draft", skillId: skill.id, skillKey: skill.skillKey } } }],
    };
    const { buffer } = await buildNexusArchive({ workflowJson, workflowName: "demo" });
    const zip = await JSZip.loadAsync(buffer);
    const resolverMetadata = JSON.parse(await zip.file("runtime/resolver-metadata.json")!.async("string"));
    // Materialize every file entry from the zip into an in-memory map so the
    // resolver can run with no library store at all.
    // NOTE(review): `new Map()` most likely lost `<string, string>` type
    // arguments in this diff — confirm against the original file.
    const files = new Map();
    for (const filename of Object.keys(zip.files)) {
      if (zip.files[filename].dir) continue;
      files.set(filename, await zip.file(filename)!.async("string"));
    }
    const bundle = resolveFromArtifact(
      { scope: "workspace", packId: pack.id, packVersion: "draft", skillId: skill.id },
      { manifest: { schemaVersion: 1, packs: [] }, resolverMetadata: resolverMetadata.entries, files },
    );
    expect(bundle).not.toBeNull();
    expect(bundle?.entrypoint.content).toContain("body");
  });
});
diff --git a/src/lib/__tests__/library-import.test.ts b/src/lib/__tests__/library-import.test.ts
new file mode 100644
index 0000000..9b05868
--- /dev/null
+++ b/src/lib/__tests__/library-import.test.ts
@@ -0,0 +1,80 @@
+import fs from "node:fs/promises";
+import os from "node:os";
+import path from "node:path";
+import JSZip from "jszip";
+import { afterEach, beforeEach, describe, expect, it } from "bun:test";
+import {
+ getLibraryStore,
+ resetLibraryStoreForTests,
+ resetLibraryConfigCache,
+ buildNexusArchive,
+ importNexusArchive,
+ importAgentSkillsFolder,
+} from "@/lib/library-store";
+
+let tempDir = "";
+
// Integration tests for importing packs into the library store, covering the
// Nexus-native archive format, hash-mismatch rejection, and best-effort
// import of an "Agent Skills" style zip.
describe("Nexus import", () => {
  beforeEach(async () => {
    // Point the store at a throwaway directory so tests never touch real data.
    tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "nexus-import-test-"));
    process.env.NEXUS_LIBRARY_DATA_DIR = tempDir;
    process.env.NEXUS_BRAIN_TOKEN_SECRET = "test-secret";
    resetLibraryStoreForTests();
    resetLibraryConfigCache();
  });

  afterEach(async () => {
    resetLibraryStoreForTests();
    resetLibraryConfigCache();
    delete process.env.NEXUS_LIBRARY_DATA_DIR;
    if (tempDir) await fs.rm(tempDir, { recursive: true, force: true });
  });

  it("round-trips Nexus-native export + import", async () => {
    // Build a pack with one skill in workspace ws-1 …
    const store = getLibraryStore();
    const { workspace } = await store.ensureLibraries("ws-1", "user-1");
    const pack = await store.createPack(workspace.id, { packKey: "src", name: "Src" });
    const { document } = await store.createDocument(pack.id, {
      role: "skill-entrypoint",
      path: "SKILL.md",
      content: "---\nname: original\ndescription: original\n---\nbody",
    });
    const skill = await store.createSkill(pack.id, { skillKey: "original", name: "Original", description: "", entrypointDocId: document.id });

    const workflowJson = {
      nodes: [{ data: { libraryRef: { scope: "workspace", packId: pack.id, packKey: pack.packKey, packVersion: "draft", skillId: skill.id, skillKey: skill.skillKey } } }],
    };
    const { buffer } = await buildNexusArchive({ workflowJson, workflowName: "demo" });

    // … then import the archive into a different workspace (ws-2) and verify
    // the pack, its skill, and the entrypoint content all survive the trip.
    const result = await importNexusArchive({ buffer, workspaceId: "ws-2", scope: "workspace" });
    expect(result.packs).toHaveLength(1);
    const newPack = result.packs[0];
    const newSkills = await store.listSkills(newPack.id);
    expect(newSkills).toHaveLength(1);
    const docs = await store.listDocuments(newPack.id);
    const entrypoint = docs.find((d) => d.role === "skill-entrypoint");
    expect(entrypoint).toBeTruthy();
    const restoredContent = await store.readDocumentContent(entrypoint!.id, entrypoint!.currentVersionId);
    expect(restoredContent).toContain("body");
  });

  it("rejects archive with hash mismatch (FR-67)", async () => {
    // Hand-build a structurally valid archive whose hashes.json deliberately
    // lies; the importer must refuse it.
    const zip = new JSZip();
    zip.file("manifest.json", JSON.stringify({ schemaVersion: 1, workflowName: "x", createdAt: "", createdBy: "", resolverMode: "artifact", packs: [], skills: [] }));
    zip.file("workflow.json", "{}");
    zip.file("hashes.json", JSON.stringify({ "workflow.json": "wrong-hash", "manifest.json": "wrong-hash" }));
    const buffer = await zip.generateAsync({ type: "nodebuffer" });
    await expect(importNexusArchive({ buffer, workspaceId: "ws-3", scope: "workspace" })).rejects.toThrow();
  });

  it("best-effort imports an Agent Skills zip with a single SKILL.md", async () => {
    // Minimal folder layout: one <skill>/SKILL.md with YAML frontmatter.
    const zip = new JSZip();
    zip.file("my-skill/SKILL.md", "---\nname: my-skill\ndescription: x\n---\nbody");
    const buffer = await zip.generateAsync({ type: "nodebuffer" });
    const result = await importAgentSkillsFolder({ buffer, workspaceId: "ws-4", packKey: "import-pack", scope: "user" });
    expect(result.packs).toHaveLength(1);
    const store = getLibraryStore();
    const skills = await store.listSkills(result.packs[0].id);
    expect(skills).toHaveLength(1);
  });
});
diff --git a/src/lib/__tests__/library-merge.test.ts b/src/lib/__tests__/library-merge.test.ts
new file mode 100644
index 0000000..5499ea7
--- /dev/null
+++ b/src/lib/__tests__/library-merge.test.ts
@@ -0,0 +1,36 @@
+import { describe, expect, it } from "bun:test";
+import { threeWayTextMerge } from "@/lib/library-store/merge";
+
+// Line-based three-way merge: (ancestor, ours, theirs) -> merged content plus
+// a cleanlyMerged flag and structured conflict records.
+describe("threeWayTextMerge", () => {
+ it("returns identical text when both sides match", () => {
+ const result = threeWayTextMerge("a\nb\nc", "a\nb\nc", "a\nb\nc");
+ expect(result.cleanlyMerged).toBe(true);
+ expect(result.content).toBe("a\nb\nc");
+ });
+
+ it("takes the side that diverged when the other matches ancestor", () => {
+ const result = threeWayTextMerge("a\nb\nc", "a\nb\nc", "a\nB\nc");
+ expect(result.cleanlyMerged).toBe(true);
+ expect(result.content).toBe("a\nB\nc"); // theirs diverged, ours == ancestor
+ });
+
+ it("identical concurrent edits merge cleanly", () => {
+ const result = threeWayTextMerge("a\nb\nc", "a\nB\nc", "a\nB\nc");
+ expect(result.cleanlyMerged).toBe(true);
+ expect(result.content).toBe("a\nB\nc");
+ });
+
+ it("same-line conflict produces conflict marker", () => {
+ const result = threeWayTextMerge("a\nb\nc", "a\nX\nc", "a\nY\nc");
+ expect(result.cleanlyMerged).toBe(false);
+ expect(result.conflicts.length).toBeGreaterThan(0);
+ // Git-style conflict markers are embedded in the merged content.
+ expect(result.content).toContain("<<<<<<<");
+ expect(result.content).toContain(">>>>>>>");
+ });
+
+ it("add_add conflict when ancestor is empty", () => {
+ const result = threeWayTextMerge("", "a", "b");
+ expect(result.cleanlyMerged).toBe(false);
+ expect(result.conflicts[0]?.conflictType).toBe("add_add");
+ });
+});
diff --git a/src/lib/__tests__/library-resolver.test.ts b/src/lib/__tests__/library-resolver.test.ts
new file mode 100644
index 0000000..708b46c
--- /dev/null
+++ b/src/lib/__tests__/library-resolver.test.ts
@@ -0,0 +1,62 @@
+import fs from "node:fs/promises";
+import os from "node:os";
+import path from "node:path";
+import { afterEach, beforeEach, describe, expect, it } from "bun:test";
+import {
+ getLibraryStore,
+ resetLibraryStoreForTests,
+ resetLibraryConfigCache,
+} from "@/lib/library-store";
+
+let tempDir = ""; // fresh per-test data dir; wiped in afterEach
+
+describe("library resolver", () => {
+ beforeEach(async () => {
+ // Point the store at an isolated temp dir and reset module-level singletons
+ // so each test starts from an empty library.
+ tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "nexus-resolver-test-"));
+ process.env.NEXUS_LIBRARY_DATA_DIR = tempDir;
+ process.env.NEXUS_BRAIN_TOKEN_SECRET = "test-secret";
+ resetLibraryStoreForTests();
+ resetLibraryConfigCache();
+ });
+
+ afterEach(async () => {
+ resetLibraryStoreForTests();
+ resetLibraryConfigCache();
+ delete process.env.NEXUS_LIBRARY_DATA_DIR;
+ if (tempDir) await fs.rm(tempDir, { recursive: true, force: true });
+ });
+
+ it("draft live resolution returns current head", async () => {
+ const store = getLibraryStore();
+ const { workspace } = await store.ensureLibraries("ws", "user");
+ const pack = await store.createPack(workspace.id, { packKey: "p", name: "P" });
+ const { document } = await store.createDocument(pack.id, {
+ role: "skill-entrypoint",
+ path: "SKILL.md",
+ content: "---\nname: x\ndescription: y\n---\nv1",
+ });
+ const skill = await store.createSkill(pack.id, { skillKey: "x", name: "X", description: "y", entrypointDocId: document.id });
+ let bundle = await store.resolveLive({ scope: "workspace", packId: pack.id, packVersion: "draft", skillId: skill.id });
+ expect(bundle?.entrypoint.content).toContain("v1");
+ // "draft" tracks the live head, so a new version is immediately visible.
+ await store.saveDocumentVersion(document.id, { content: "---\nname: x\ndescription: y\n---\nv2", previousVersionId: document.currentVersionId });
+ bundle = await store.resolveLive({ scope: "workspace", packId: pack.id, packVersion: "draft", skillId: skill.id });
+ expect(bundle?.entrypoint.content).toContain("v2");
+ });
+
+ it("pinned version ignores subsequent draft edits", async () => {
+ const store = getLibraryStore();
+ const { workspace } = await store.ensureLibraries("ws", "user");
+ const pack = await store.createPack(workspace.id, { packKey: "p", name: "P" });
+ const { document } = await store.createDocument(pack.id, {
+ role: "skill-entrypoint",
+ path: "SKILL.md",
+ content: "---\nname: x\ndescription: y\n---\nfrozen",
+ });
+ const skill = await store.createSkill(pack.id, { skillKey: "x", name: "X", description: "y", entrypointDocId: document.id });
+ await store.publishPackVersion(pack.id, { version: "1.0.0" });
+ // Edit after publish: the published snapshot must remain immutable.
+ await store.saveDocumentVersion(document.id, { content: "---\nname: x\ndescription: y\n---\nedited", previousVersionId: document.currentVersionId });
+ const bundle = await store.resolveLive({ scope: "workspace", packId: pack.id, packVersion: "1.0.0", skillId: skill.id });
+ expect(bundle?.entrypoint.content).toContain("frozen");
+ expect(bundle?.entrypoint.content).not.toContain("edited");
+ });
+});
diff --git a/src/lib/__tests__/library-store.test.ts b/src/lib/__tests__/library-store.test.ts
new file mode 100644
index 0000000..15daa4b
--- /dev/null
+++ b/src/lib/__tests__/library-store.test.ts
@@ -0,0 +1,198 @@
+import fs from "node:fs/promises";
+import os from "node:os";
+import path from "node:path";
+import { afterEach, beforeEach, describe, expect, it } from "bun:test";
+import {
+ getLibraryStore,
+ resetLibraryStoreForTests,
+ resetLibraryConfigCache,
+ StaleVersionError,
+} from "@/lib/library-store";
+
+let tempDir = ""; // per-test data dir; removed in afterEach
+const WORKSPACE_ID = "ws-test-123";
+const USER_ID = "user-test-1";
+
+describe("LibraryStore", () => {
+ beforeEach(async () => {
+ // Isolate each test: fresh temp dir, env pointing the store at it, and
+ // singleton/config caches reset so no state leaks between tests.
+ tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "nexus-library-test-"));
+ process.env.NEXUS_LIBRARY_DATA_DIR = tempDir;
+ process.env.NEXUS_BRAIN_TOKEN_SECRET = "test-secret";
+ resetLibraryStoreForTests();
+ resetLibraryConfigCache();
+ });
+
+ afterEach(async () => {
+ resetLibraryStoreForTests();
+ resetLibraryConfigCache();
+ delete process.env.NEXUS_LIBRARY_DATA_DIR;
+ if (tempDir) await fs.rm(tempDir, { recursive: true, force: true });
+ });
+
+ it("creates workspace and user libraries", async () => {
+ const store = getLibraryStore();
+ const { workspace, user } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ expect(workspace.scope).toBe("workspace");
+ expect(user?.scope).toBe("user");
+ // ensureLibraries is idempotent: a second call returns the same rows.
+ const again = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ expect(again.workspace.id).toBe(workspace.id);
+ expect(again.user?.id).toBe(user?.id);
+ });
+
+ it("createPackWithTwoSkills (AC-1)", async () => {
+ const store = getLibraryStore();
+ const { workspace } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ const pack = await store.createPack(workspace.id, { packKey: "support", name: "Support" });
+ const { document: skillADoc } = await store.createDocument(pack.id, {
+ role: "skill-entrypoint",
+ path: "support-triage/SKILL.md",
+ content: "# Triage",
+ });
+ const { document: skillBDoc } = await store.createDocument(pack.id, {
+ role: "skill-entrypoint",
+ path: "support-escalate/SKILL.md",
+ content: "# Escalate",
+ });
+ const { document: refDoc } = await store.createDocument(pack.id, {
+ role: "reference",
+ path: "references/policy.md",
+ content: "# policy",
+ });
+ await store.createSkill(pack.id, { skillKey: "support-triage", name: "Triage", description: "", entrypointDocId: skillADoc.id });
+ await store.createSkill(pack.id, { skillKey: "support-escalate", name: "Escalate", description: "", entrypointDocId: skillBDoc.id });
+ const skills = await store.listSkills(pack.id);
+ expect(skills).toHaveLength(2);
+ const docs = await store.listDocuments(pack.id);
+ expect(docs.find((d) => d.id === refDoc.id)?.path).toBe("references/policy.md");
+ });
+
+ it("rejects stale previousVersionId on saveDocumentVersion (FR-14)", async () => {
+ const store = getLibraryStore();
+ const { workspace } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ const pack = await store.createPack(workspace.id, { packKey: "p", name: "P" });
+ const { document } = await store.createDocument(pack.id, { role: "doc", path: "a.md", content: "v1" });
+ await store.saveDocumentVersion(document.id, { content: "v2", previousVersionId: document.currentVersionId });
+ // Second save reuses the (now stale) head id -> optimistic-concurrency failure.
+ await expect(
+ store.saveDocumentVersion(document.id, { content: "v3", previousVersionId: document.currentVersionId }),
+ ).rejects.toBeInstanceOf(StaleVersionError);
+ });
+
+ it("versionSnapshot persists content under documents/{id}/versions/{v}/content.md (AC-4)", async () => {
+ const store = getLibraryStore();
+ const { workspace } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ const pack = await store.createPack(workspace.id, { packKey: "p", name: "P" });
+ const { document, version } = await store.createDocument(pack.id, { role: "doc", path: "a.md", content: "hello" });
+ // Verify the on-disk object layout directly, not just the store API.
+ const filePath = path.join(tempDir, "objects", "documents", document.id, "versions", version.id, "content.md");
+ const content = await fs.readFile(filePath, "utf8");
+ expect(content).toBe("hello");
+ });
+
+ it("forks pack and copies skill+document rows with base_version_id (AC-2)", async () => {
+ const store = getLibraryStore();
+ const { workspace, user } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ const pack = await store.createPack(workspace.id, { packKey: "src", name: "Src" });
+ const { document } = await store.createDocument(pack.id, { role: "skill-entrypoint", path: "SKILL.md", content: "src content" });
+ await store.createSkill(pack.id, { skillKey: "src", name: "Src", description: "", entrypointDocId: document.id });
+ const fork = await store.forkPack(pack.id, user!.id);
+ expect(fork.basePackId).toBe(pack.id); // fork remembers its base pack
+ const forkDocs = await store.listDocuments(fork.id);
+ expect(forkDocs.some((d) => d.path === "SKILL.md")).toBe(true);
+ const forkSkills = await store.listSkills(fork.id);
+ expect(forkSkills).toHaveLength(1);
+ });
+
+ it("merges base into fork cleanly (AC-5)", async () => {
+ const store = getLibraryStore();
+ const { workspace, user } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ const pack = await store.createPack(workspace.id, { packKey: "src", name: "Src" });
+ const { document } = await store.createDocument(pack.id, { role: "doc", path: "a.md", content: "line1\nline2\n" });
+ const fork = await store.forkPack(pack.id, user!.id);
+
+ // Only the base changed (append), so the merge must apply without conflict.
+ await store.saveDocumentVersion(document.id, { content: "line1\nline2\nline3\n", previousVersionId: document.currentVersionId });
+ const merge = await store.mergeBaseIntoBranch(fork.id);
+ expect(merge.status).toBe("clean");
+ const forkDocs = await store.listDocuments(fork.id);
+ const forkDoc = forkDocs.find((d) => d.path === "a.md");
+ const updated = await store.readDocumentContent(forkDoc!.id, forkDoc!.currentVersionId);
+ expect(updated).toBe("line1\nline2\nline3\n");
+ });
+
+ it("merge with same-line conflict creates document_merges + document_conflicts (AC-6)", async () => {
+ const store = getLibraryStore();
+ const { workspace, user } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ const pack = await store.createPack(workspace.id, { packKey: "src", name: "Src" });
+ const { document } = await store.createDocument(pack.id, { role: "doc", path: "a.md", content: "shared\n" });
+ const fork = await store.forkPack(pack.id, user!.id);
+ const forkDocs = await store.listDocuments(fork.id);
+ const forkDoc = forkDocs.find((d) => d.path === "a.md")!;
+
+ // Divergent edits to the same line on both sides -> conflict expected.
+ await store.saveDocumentVersion(document.id, { content: "base-edit\n", previousVersionId: document.currentVersionId });
+ await store.saveDocumentVersion(forkDoc.id, { content: "fork-edit\n", previousVersionId: forkDoc.currentVersionId });
+
+ const merge = await store.mergeBaseIntoBranch(fork.id);
+ expect(merge.status).toBe("conflict");
+ expect(merge.conflictDocs.length).toBeGreaterThan(0);
+ const conflicts = await store.listConflicts(merge.id);
+ expect(conflicts.length).toBeGreaterThan(0);
+ });
+
+ it("resolveMergeConflict updates branch head (FR-27)", async () => {
+ const store = getLibraryStore();
+ const { workspace, user } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ const pack = await store.createPack(workspace.id, { packKey: "src", name: "Src" });
+ const { document } = await store.createDocument(pack.id, { role: "doc", path: "a.md", content: "shared\n" });
+ const fork = await store.forkPack(pack.id, user!.id);
+ const forkDocs = await store.listDocuments(fork.id);
+ const forkDoc = forkDocs.find((d) => d.path === "a.md")!;
+ await store.saveDocumentVersion(document.id, { content: "X\n", previousVersionId: document.currentVersionId });
+ await store.saveDocumentVersion(forkDoc.id, { content: "Y\n", previousVersionId: forkDoc.currentVersionId });
+
+ const merge = await store.mergeBaseIntoBranch(fork.id);
+ // Supplying resolved content per doc finalizes the merge and moves the head.
+ const resolved = await store.resolveMergeConflict(merge.id, { resolvedContentByDocId: { [forkDoc.id]: "merged\n" }, resolvedBy: "user" });
+ expect(resolved.status).toBe("resolved");
+ const refreshed = await store.listDocuments(fork.id);
+ const refreshedDoc = refreshed.find((d) => d.id === forkDoc.id)!;
+ const finalContent = await store.readDocumentContent(refreshedDoc.id, refreshedDoc.currentVersionId);
+ expect(finalContent).toBe("merged\n");
+ });
+
+ it("publishes pack version snapshotting current doc heads (AC-7, FR-42)", async () => {
+ const store = getLibraryStore();
+ const { workspace } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ const pack = await store.createPack(workspace.id, { packKey: "p", name: "P" });
+ const { document } = await store.createDocument(pack.id, { role: "skill-entrypoint", path: "SKILL.md", content: "---\nname: x\ndescription: y\n---\n" });
+ await store.createSkill(pack.id, { skillKey: "x", name: "X", description: "y", entrypointDocId: document.id });
+ const pv = await store.publishPackVersion(pack.id, { version: "1.0.0" });
+ expect(pv.version).toBe("1.0.0");
+ // Published snapshot writes a manifest alongside the pack-version objects.
+ const manifestPath = path.join(tempDir, "objects", "packs", pack.id, "versions", pv.id, "manifest.json");
+ const exists = await fs.stat(manifestPath).then(() => true).catch(() => false);
+ expect(exists).toBe(true);
+ });
+
+ it("publishes skill version snapshotting closure (AC-8, FR-43)", async () => {
+ const store = getLibraryStore();
+ const { workspace } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ const pack = await store.createPack(workspace.id, { packKey: "p", name: "P" });
+ const { document } = await store.createDocument(pack.id, { role: "skill-entrypoint", path: "SKILL.md", content: "---\nname: x\ndescription: y\n---\n" });
+ const skill = await store.createSkill(pack.id, { skillKey: "x", name: "X", description: "y", entrypointDocId: document.id });
+ const sv = await store.publishSkillVersion(skill.id, { version: "0.1.0" });
+ const list = await store.listSkillVersions(skill.id);
+ expect(list).toHaveLength(1);
+ expect(list[0].id).toBe(sv.id);
+ });
+
+ it("resolveLive returns SkillBundle for draft and pinned version", async () => {
+ const store = getLibraryStore();
+ const { workspace } = await store.ensureLibraries(WORKSPACE_ID, USER_ID);
+ const pack = await store.createPack(workspace.id, { packKey: "p", name: "P" });
+ const { document } = await store.createDocument(pack.id, { role: "skill-entrypoint", path: "SKILL.md", content: "---\nname: x\ndescription: y\n---\nbody-1\n" });
+ const skill = await store.createSkill(pack.id, { skillKey: "x", name: "X", description: "y", entrypointDocId: document.id });
+ const draft = await store.resolveLive({ scope: "workspace", packId: pack.id, packVersion: "draft", skillId: skill.id });
+ expect(draft?.entrypoint.content).toContain("body-1");
+ const pv = await store.publishPackVersion(pack.id, { version: "1.0.0" });
+ // Draft moves on; the pinned 1.0.0 resolution must still see body-1.
+ await store.saveDocumentVersion(document.id, { content: "---\nname: x\ndescription: y\n---\nbody-2\n", previousVersionId: document.currentVersionId });
+ const pinned = await store.resolveLive({ scope: "workspace", packId: pack.id, packVersion: "1.0.0", skillId: skill.id });
+ expect(pinned?.entrypoint.content).toContain("body-1");
+ expect(pv.id).toBeTruthy();
+ });
+});
diff --git a/src/lib/__tests__/library-validation.test.ts b/src/lib/__tests__/library-validation.test.ts
new file mode 100644
index 0000000..50d4fc9
--- /dev/null
+++ b/src/lib/__tests__/library-validation.test.ts
@@ -0,0 +1,106 @@
+import { describe, expect, it } from "bun:test";
+import { validatePack, parseFrontmatter } from "@/lib/library-store/validation";
+import type { LibraryDocumentRecord, PackRecord, SkillRecord } from "@/lib/library-store/types";
+
+// Builds a PackRecord fixture; callers override only the fields under test.
+// `Partial` requires its type argument — without `<PackRecord>` this does not compile.
+function buildPack(overrides: Partial<PackRecord> = {}): PackRecord {
+ return {
+ id: "p1",
+ libraryId: "lib1",
+ packKey: "test",
+ name: "Test",
+ description: "",
+ tags: [],
+ basePackId: null,
+ external: false,
+ currentBranchId: "b1",
+ createdBy: "",
+ createdAt: "2026-01-01",
+ updatedAt: "2026-01-01",
+ deletedAt: null,
+ ...overrides,
+ };
+}
+
+// Document row fixture with fixed timestamps so assertions stay deterministic.
+function buildDoc(id: string, role: LibraryDocumentRecord["role"], docPath: string): LibraryDocumentRecord {
+ const record: LibraryDocumentRecord = {
+ id,
+ packId: "p1",
+ role,
+ path: docPath,
+ currentVersionId: `v-${id}`,
+ createdBy: "",
+ createdAt: "2026-01-01",
+ updatedAt: "2026-01-01",
+ deletedAt: null,
+ };
+ return record;
+}
+
+// Skill row fixture: name mirrors the key, active (not deprecated), never deleted.
+function buildSkill(id: string, skillKey: string, entrypointDocId: string): SkillRecord {
+ const record: SkillRecord = {
+ id,
+ packId: "p1",
+ skillKey,
+ name: skillKey,
+ description: "",
+ entrypointDocId,
+ createdBy: "",
+ createdAt: "2026-01-01",
+ updatedAt: "2026-01-01",
+ deletedAt: null,
+ deprecated: false,
+ };
+ return record;
+}
+
+// Each test asserts that validatePack emits a warning with the expected code;
+// warnings are additive, so `some()` checks are used rather than exact lists.
+describe("validatePack", () => {
+ it("flags missing entrypoint", () => {
+ const pack = buildPack();
+ const skills = [buildSkill("s1", "x", "missing-doc")]; // entrypoint doc id points nowhere
+ const warnings = validatePack({ pack, skills, documents: [], documentContents: new Map() });
+ expect(warnings.some((w) => w.code === "missing_entrypoint")).toBe(true);
+ });
+
+ it("flags invalid frontmatter", () => {
+ const pack = buildPack();
+ const doc = buildDoc("d1", "skill-entrypoint", "SKILL.md");
+ const skills = [buildSkill("s1", "x", "d1")];
+ const contents = new Map([["d1", "no frontmatter here"]]);
+ const warnings = validatePack({ pack, skills, documents: [doc], documentContents: contents });
+ expect(warnings.some((w) => w.code === "invalid_frontmatter")).toBe(true);
+ });
+
+ it("flags duplicate skill keys", () => {
+ const pack = buildPack();
+ const doc = buildDoc("d1", "skill-entrypoint", "SKILL.md");
+ const skills = [buildSkill("s1", "dup", "d1"), buildSkill("s2", "dup", "d1")];
+ const contents = new Map([["d1", "---\nname: dup\ndescription: y\n---\n"]]);
+ const warnings = validatePack({ pack, skills, documents: [doc], documentContents: contents });
+ expect(warnings.some((w) => w.code === "duplicate_skill_id")).toBe(true);
+ });
+
+ it("flags broken relative references", () => {
+ const pack = buildPack();
+ const doc = buildDoc("d1", "doc", "a.md");
+ const contents = new Map([["d1", "see [other](./missing.md)"]]); // link target absent from pack
+ const warnings = validatePack({ pack, skills: [], documents: [doc], documentContents: contents });
+ expect(warnings.some((w) => w.code === "broken_reference")).toBe(true);
+ });
+
+ it("flags unresolved merge", () => {
+ const pack = buildPack();
+ const warnings = validatePack({ pack, skills: [], documents: [], documentContents: new Map(), unresolvedMergeIds: ["m1"] });
+ expect(warnings.some((w) => w.code === "unresolved_merge")).toBe(true);
+ });
+});
+});
+
+describe("parseFrontmatter", () => {
+ it("extracts data and body", () => {
+ const result = parseFrontmatter("---\nname: foo\ndescription: bar\n---\nbody text\n");
+ expect(result.data).toEqual({ name: "foo", description: "bar" });
+ expect(result.body).toContain("body text");
+ });
+
+ it("returns null data when no frontmatter", () => {
+ // Missing `---` fence: data is null rather than throwing.
+ const result = parseFrontmatter("just body");
+ expect(result.data).toBeNull();
+ });
+});
diff --git a/src/lib/collaboration/lib-doc-collab.ts b/src/lib/collaboration/lib-doc-collab.ts
new file mode 100644
index 0000000..660bcff
--- /dev/null
+++ b/src/lib/collaboration/lib-doc-collab.ts
@@ -0,0 +1,82 @@
+"use client";
+
+import * as Y from "yjs";
+import { HocuspocusProvider } from "@hocuspocus/provider";
+import { getCollabServerUrl } from "./config";
+import { getColorForClientId, getOrCreateUserName } from "./awareness-names";
+import type { LibraryScope } from "@/types/library";
+
+// Identifies one library document for collaborative editing.
+export interface OpenLibraryDocOptions {
+ workspaceId: string;
+ scope: LibraryScope;
+ packId: string;
+ docId: string;
+ initialContent?: string; // seeded into the shared Y.Text only if the server doc is empty
+}
+
+// Handle for an open collaborative room; destroy() tears down provider + doc.
+export interface LibraryDocRoom {
+ provider: HocuspocusProvider;
+ ydoc: Y.Doc;
+ yText: Y.Text;
+ roomId: string;
+ destroy: () => void;
+}
+
+// Registry of open rooms keyed by room id; untyped `new Map()` would infer Map<any, any>.
+const activeRooms = new Map<string, LibraryDocRoom>();
+
+// Stable, colon-delimited room name shared by every collaborator on a doc.
+export function buildLibraryRoomId(workspaceId: string, scope: LibraryScope, packId: string, docId: string): string {
+ return ["lib", workspaceId, scope, packId, docId].join(":");
+}
+
+// Opens (or reuses) the collaborative room for a library document and returns
+// its handle. One provider/doc pair exists per room id per browser tab.
+export function openLibraryDocRoom(options: OpenLibraryDocOptions): LibraryDocRoom {
+ const roomId = buildLibraryRoomId(options.workspaceId, options.scope, options.packId, options.docId);
+ const existing = activeRooms.get(roomId);
+ if (existing) return existing; // already connected: share the live room
+
+ const ydoc = new Y.Doc();
+ const yText = ydoc.getText("content"); // single shared text field per doc
+
+ const provider = new HocuspocusProvider({
+ url: getCollabServerUrl(),
+ name: roomId,
+ document: ydoc,
+ onSynced: ({ state }) => {
+ // Seed initial content only once the server confirms sync AND the shared
+ // text is still empty, so existing server state is never overwritten.
+ // NOTE(review): two first-joiners syncing simultaneously could both pass
+ // the emptiness check — confirm the server serializes initial writes.
+ if (state && options.initialContent && yText.length === 0) {
+ ydoc.transact(() => {
+ yText.insert(0, options.initialContent ?? "");
+ });
+ }
+ },
+ });
+
+ // Advertise this client's identity/color to peers via awareness.
+ const selfName = getOrCreateUserName();
+ const colors = getColorForClientId(ydoc.clientID);
+ provider.setAwarenessField("user", {
+ name: selfName,
+ color: colors.color,
+ colorLight: colors.colorLight,
+ });
+
+ const room: LibraryDocRoom = {
+ provider,
+ ydoc,
+ yText,
+ roomId,
+ destroy: () => {
+ // Tear down network + CRDT state and evict from the registry.
+ provider.destroy();
+ ydoc.destroy();
+ activeRooms.delete(roomId);
+ },
+ };
+ activeRooms.set(roomId, room);
+ return room;
+}
+
+// Closes a room if it is open; destroy() also evicts it from the registry.
+export function closeLibraryDocRoom(roomId: string): void {
+ activeRooms.get(roomId)?.destroy();
+}
+
+// Read-only registry lookup; never opens a new room.
+export function getActiveLibraryDocRoom(roomId: string): LibraryDocRoom | undefined {
+ return activeRooms.get(roomId);
+}
diff --git a/src/lib/library-client.ts b/src/lib/library-client.ts
new file mode 100644
index 0000000..077a38b
--- /dev/null
+++ b/src/lib/library-client.ts
@@ -0,0 +1,258 @@
+"use client";
+
+import type {
+ LibraryScope,
+ SkillRef,
+ SkillBundle,
+ ValidationWarning,
+} from "@/types/library";
+import type {
+ PackRecord,
+ SkillRecord,
+ LibraryDocumentRecord,
+ LibraryDocumentVersionRecord,
+ PackVersionRecord,
+ SkillVersionRecord,
+ MergeRecord,
+ ConflictRecord,
+ LibraryRecord,
+ DocumentRole,
+} from "@/lib/library-store/types";
+
+const BRAIN_TOKEN_KEY = "nexus:brain-token";
+
+// Reads the bearer token persisted by the auth flow; null during SSR.
+function getStoredToken(): string | null {
+ if (typeof window === "undefined") return null;
+ return window.localStorage.getItem(BRAIN_TOKEN_KEY);
+}
+
+// Typed JSON fetch wrapper: attaches auth, defaults content-type for bodies,
+// and surfaces server-provided `error` messages as thrown Errors.
+// The body references `T`, so the generic parameter declaration is required.
+async function request<T>(input: string, init?: RequestInit): Promise<T> {
+ const token = getStoredToken();
+ const headers = new Headers(init?.headers);
+ if (!headers.has("content-type") && init?.body) {
+ headers.set("content-type", "application/json");
+ }
+ if (token) headers.set("authorization", `Bearer ${token}`);
+ const response = await fetch(input, { ...init, headers });
+ const json = await response.json().catch(() => null) as ({ error?: string } & T) | null;
+ if (!response.ok) {
+ throw new Error(json?.error ?? `Request failed: ${response.status}`);
+ }
+ return json as T;
+}
+
+export interface LibrarySession {
+ workspaceId: string;
+ ownerUserId: string | null;
+ libraries: LibraryRecord[];
+}
+
+// Bootstraps (or resumes) a library session for the current workspace/user.
+// Return type restored: `Promise` without its type argument does not compile.
+export async function libraryBootstrap(ownerUserId: string | null = null): Promise<LibrarySession> {
+ return request<LibrarySession>("/api/library/session", {
+ method: "POST",
+ body: JSON.stringify({ token: getStoredToken(), ownerUserId }),
+ });
+}
+
+// Lists packs visible in the given library scope.
+export async function listPacksForScope(scope: LibraryScope): Promise<PackRecord[]> {
+ const { packs } = await request<{ packs: PackRecord[] }>(`/api/library/packs?scope=${scope}`);
+ return packs;
+}
+
+export async function createPack(scope: LibraryScope, packKey: string, name: string, description?: string): Promise<PackRecord> {
+ const { pack } = await request<{ pack: PackRecord }>("/api/library/packs", {
+ method: "POST",
+ body: JSON.stringify({ scope, packKey, name, description }),
+ });
+ return pack;
+}
+
+// Returns null when the pack does not exist (server responds with pack: null).
+export async function getPack(packId: string): Promise<PackRecord | null> {
+ const { pack } = await request<{ pack: PackRecord | null }>(`/api/library/packs/${packId}`);
+ return pack;
+}
+
+export async function updatePack(packId: string, patch: { name?: string; description?: string; tags?: string[] }): Promise<PackRecord> {
+ const { pack } = await request<{ pack: PackRecord }>(`/api/library/packs/${packId}`, {
+ method: "PATCH",
+ body: JSON.stringify(patch),
+ });
+ return pack;
+}
+
+// Soft delete: the server flags the pack rather than destroying its data.
+export async function softDeletePack(packId: string): Promise<void> {
+ await request<{ deleted: true }>(`/api/library/packs/${packId}`, { method: "DELETE" });
+}
+
+// Forks a pack into the target scope (defaults to the user-local library).
+export async function forkPack(packId: string, targetScope: LibraryScope = "user"): Promise<PackRecord> {
+ const { pack } = await request<{ pack: PackRecord }>(`/api/library/packs/${packId}/fork`, {
+ method: "POST",
+ body: JSON.stringify({ targetScope }),
+ });
+ return pack;
+}
+
+export async function listDocuments(packId: string): Promise<LibraryDocumentRecord[]> {
+ const { documents } = await request<{ documents: LibraryDocumentRecord[] }>(`/api/library/packs/${packId}/documents`);
+ return documents;
+}
+
+// Creates a document plus its initial version in one call.
+export async function createDocument(packId: string, payload: { role: DocumentRole; path: string; content: string }): Promise<{ document: LibraryDocumentRecord; version: LibraryDocumentVersionRecord }> {
+ return request<{ document: LibraryDocumentRecord; version: LibraryDocumentVersionRecord }>(`/api/library/packs/${packId}/documents`, {
+ method: "POST",
+ body: JSON.stringify(payload),
+ });
+}
+
+export async function updateDocument(packId: string, docId: string, patch: { path?: string; role?: DocumentRole }): Promise<LibraryDocumentRecord> {
+ const { document } = await request<{ document: LibraryDocumentRecord }>(`/api/library/packs/${packId}/documents/${docId}`, {
+ method: "PATCH",
+ body: JSON.stringify(patch),
+ });
+ return document;
+}
+
+export async function deleteDocument(packId: string, docId: string): Promise<void> {
+ await request<{ deleted: true }>(`/api/library/packs/${packId}/documents/${docId}`, { method: "DELETE" });
+}
+
+export async function listDocumentVersions(packId: string, docId: string): Promise<LibraryDocumentVersionRecord[]> {
+ const { versions } = await request<{ versions: LibraryDocumentVersionRecord[] }>(`/api/library/packs/${packId}/documents/${docId}/versions`);
+ return versions;
+}
+
+// previousVersionId implements optimistic concurrency: the server rejects a
+// save whose previousVersionId is no longer the document head.
+export async function saveDocumentVersion(packId: string, docId: string, payload: { content: string; previousVersionId: string | null; message?: string }): Promise<LibraryDocumentVersionRecord> {
+ const { version } = await request<{ version: LibraryDocumentVersionRecord }>(`/api/library/packs/${packId}/documents/${docId}/versions`, {
+ method: "POST",
+ body: JSON.stringify(payload),
+ });
+ return version;
+}
+
+export async function getDocumentVersionContent(packId: string, docId: string, versionId: string): Promise<string> {
+ const { content } = await request<{ content: string }>(`/api/library/packs/${packId}/documents/${docId}/versions/${versionId}/content`);
+ return content;
+}
+
+export async function listSkills(packId: string): Promise<SkillRecord[]> {
+ const { skills } = await request<{ skills: SkillRecord[] }>(`/api/library/packs/${packId}/skills`);
+ return skills;
+}
+
+// Registers a skill whose entrypoint is an existing pack document.
+export async function createSkill(packId: string, payload: { skillKey: string; name: string; description: string; entrypointDocId: string }): Promise<SkillRecord> {
+ const { skill } = await request<{ skill: SkillRecord }>(`/api/library/packs/${packId}/skills`, {
+ method: "POST",
+ body: JSON.stringify(payload),
+ });
+ return skill;
+}
+
+export async function updateSkill(packId: string, skillId: string, patch: { name?: string; description?: string; deprecated?: boolean }): Promise<SkillRecord> {
+ const { skill } = await request<{ skill: SkillRecord }>(`/api/library/packs/${packId}/skills/${skillId}`, {
+ method: "PATCH",
+ body: JSON.stringify(patch),
+ });
+ return skill;
+}
+
+export async function deleteSkill(packId: string, skillId: string): Promise<void> {
+ await request<{ deleted: true }>(`/api/library/packs/${packId}/skills/${skillId}`, { method: "DELETE" });
+}
+
+// Publishes an immutable semver snapshot of the whole pack.
+export async function publishPackVersion(packId: string, payload: { version: string; notes?: string }): Promise<PackVersionRecord> {
+ const { packVersion } = await request<{ packVersion: PackVersionRecord }>(`/api/library/packs/${packId}/versions`, {
+ method: "POST",
+ body: JSON.stringify(payload),
+ });
+ return packVersion;
+}
+
+export async function listPackVersions(packId: string): Promise<PackVersionRecord[]> {
+ const { versions } = await request<{ versions: PackVersionRecord[] }>(`/api/library/packs/${packId}/versions`);
+ return versions;
+}
+
+// Publishes a single skill (and, per payload flag, links it to the latest pack version).
+export async function publishSkillVersion(packId: string, skillId: string, payload: { version: string; notes?: string; linkToLatestPackVersion?: boolean }): Promise<SkillVersionRecord> {
+ const { skillVersion } = await request<{ skillVersion: SkillVersionRecord }>(`/api/library/packs/${packId}/skills/${skillId}/versions`, {
+ method: "POST",
+ body: JSON.stringify(payload),
+ });
+ return skillVersion;
+}
+
+export async function listSkillVersions(packId: string, skillId: string): Promise<SkillVersionRecord[]> {
+ const { versions } = await request<{ versions: SkillVersionRecord[] }>(`/api/library/packs/${packId}/skills/${skillId}/versions`);
+ return versions;
+}
+
+// Triggers a three-way merge of the base pack into this fork.
+export async function mergeBaseIntoBranch(packId: string): Promise<MergeRecord> {
+ const { merge } = await request<{ merge: MergeRecord }>(`/api/library/packs/${packId}/merge-base`, {
+ method: "POST",
+ body: JSON.stringify({}),
+ });
+ return merge;
+}
+
+export async function listMergeConflicts(packId: string, mergeId: string): Promise<ConflictRecord[]> {
+ const { conflicts } = await request<{ conflicts: ConflictRecord[] }>(`/api/library/packs/${packId}/merges/${mergeId}/resolve`);
+ return conflicts;
+}
+
+// Submits per-document resolved content; `Record` needs its key/value arguments.
+export async function resolveMergeConflict(packId: string, mergeId: string, payload: { resolvedContentByDocId: Record<string, string>; resolvedBy?: string }): Promise<MergeRecord> {
+ const { merge } = await request<{ merge: MergeRecord }>(`/api/library/packs/${packId}/merges/${mergeId}/resolve`, {
+ method: "POST",
+ body: JSON.stringify(payload),
+ });
+ return merge;
+}
+
+// Resolves a skill reference to its live bundle; null when unresolvable.
+export async function resolveLiveSkill(ref: SkillRef): Promise<SkillBundle | null> {
+ const { bundle } = await request<{ bundle: SkillBundle | null }>("/api/library/resolve", {
+ method: "POST",
+ body: JSON.stringify(ref),
+ });
+ return bundle;
+}
+
+export async function validatePack(packId: string): Promise<ValidationWarning[]> {
+ const { warnings } = await request<{ warnings: ValidationWarning[] }>(`/api/library/packs/${packId}?validate=1`);
+ return warnings;
+}
+
+// Exports a workflow + referenced library content as a downloadable archive.
+// Bypasses request() because the response is binary, not JSON.
+export async function exportNexusArchive(workflowJson: unknown, workflowName: string): Promise<Blob> {
+ const token = getStoredToken();
+ const headers = new Headers();
+ headers.set("content-type", "application/json");
+ if (token) headers.set("authorization", `Bearer ${token}`);
+ const response = await fetch("/api/library/export", {
+ method: "POST",
+ headers,
+ body: JSON.stringify({ workflowJson, workflowName }),
+ });
+ if (!response.ok) {
+ // Error payloads are JSON even though success payloads are binary.
+ const error = await response.json().catch(() => null) as { error?: string } | null;
+ throw new Error(error?.error ?? `Export failed: ${response.status}`);
+ }
+ return response.blob();
+}
+
+// Uploads a Nexus archive as multipart form data; returns the packs created.
+// No explicit content-type: the browser sets the multipart boundary itself.
+export async function importNexusArchive(file: File, scope: LibraryScope = "workspace"): Promise<PackRecord[]> {
+ const token = getStoredToken();
+ const headers = new Headers();
+ if (token) headers.set("authorization", `Bearer ${token}`);
+ const formData = new FormData();
+ formData.append("file", file);
+ formData.append("scope", scope);
+ formData.append("format", "nexus");
+ const response = await fetch("/api/library/import", {
+ method: "POST",
+ headers,
+ body: formData,
+ });
+ if (!response.ok) {
+ const error = await response.json().catch(() => null) as { error?: string } | null;
+ throw new Error(error?.error ?? `Import failed: ${response.status}`);
+ }
+ const { packs } = await response.json() as { packs: PackRecord[] };
+ return packs;
+}
diff --git a/src/lib/library-store/brain-migration.ts b/src/lib/library-store/brain-migration.ts
new file mode 100644
index 0000000..5f894ad
--- /dev/null
+++ b/src/lib/library-store/brain-migration.ts
@@ -0,0 +1,69 @@
+import { getBrainStore } from "@/lib/brain/server";
+import { getLibraryStore } from "./store";
+import type { DocumentRole } from "./types";
+
/** Options controlling migration of Brain documents into the skill library. */
export interface MigrateBrainOptions {
  workspaceId: string;
  // When set, migration targets this user's personal library (falls back to
  // the workspace library when no user library is available).
  ownerUserId?: string | null;
  // Desired pack key; a numeric suffix is appended on collision.
  packKey?: string;
  packName?: string;
  createdBy?: string;
}

/**
 * Copy every Brain document of a workspace into a newly created library pack.
 * Prefers the user-local library when one exists, otherwise uses the
 * workspace library. Returns the new pack id and the number of docs imported.
 */
export async function migrateBrainDocsToUserLibrary(options: MigrateBrainOptions): Promise<{ packId: string; importedCount: number }> {
  const brainDocs = await getBrainStore().listDocs(options.workspaceId);
  const store = getLibraryStore();
  const { user, workspace } = await store.ensureLibraries(options.workspaceId, options.ownerUserId ?? "default-user");
  const targetLib = user ?? workspace;

  // Pick a pack key not already in use; deleted packs count too, so a key is
  // never silently reused.
  const packKey = options.packKey ?? "brain-imported";
  const existingPacks = await store.listPacks(targetLib.id, { includeDeleted: true });
  let key = packKey;
  let suffix = 1;
  while (existingPacks.some((p) => p.packKey === key)) {
    key = `${packKey}-${suffix++}`;
  }

  const pack = await store.createPack(targetLib.id, {
    packKey: key,
    name: options.packName ?? "Brain documents",
    description: "Imported from Brain library",
    tags: ["brain", "migrated"],
    createdBy: options.createdBy ?? "",
    metadata: { source: "brain-migration" },
  });

  let importedCount = 0;
  for (const doc of brainDocs) {
    const role = inferRole(doc.docType);
    // "data" docs are stored as JSON; everything else as Markdown.
    const ext = doc.docType === "data" ? ".json" : ".md";
    // Slugify the title; fall back to the doc id when the title slugs to "".
    // NOTE(review): two docs whose titles slug identically produce the same
    // path — confirm createDocument rejects or versions duplicates.
    const sanitizedTitle = doc.title.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-+|-+$/g, "");
    const path = `${sanitizedTitle || doc.id}${ext}`;
    await store.createDocument(pack.id, {
      role,
      path,
      content: doc.content,
      createdBy: options.createdBy ?? doc.createdBy ?? "",
      message: "brain-migration",
      metadata: { brainDocId: doc.id, docType: doc.docType, tags: doc.tags },
    });
    importedCount++;
  }

  return { packId: pack.id, importedCount };
}
+
+function inferRole(docType: string): DocumentRole {
+ switch (docType) {
+ case "runbook":
+ case "guide":
+ return "doc";
+ case "summary":
+ case "note":
+ return "reference";
+ case "data":
+ return "asset";
+ default:
+ return "doc";
+ }
+}
diff --git a/src/lib/library-store/config.ts b/src/lib/library-store/config.ts
new file mode 100644
index 0000000..cb6be9c
--- /dev/null
+++ b/src/lib/library-store/config.ts
@@ -0,0 +1,24 @@
+import path from "node:path";
+import { getBrainConfig } from "@/lib/brain/config";
+
+export interface LibraryStoreConfig {
+ dataDir: string;
+ tokenSecret: string;
+}
+
+let cachedConfig: LibraryStoreConfig | null = null;
+
+export function getLibraryConfig(): LibraryStoreConfig {
+ if (cachedConfig) return cachedConfig;
+
+ cachedConfig = {
+ dataDir: process.env.NEXUS_LIBRARY_DATA_DIR ?? path.join(process.cwd(), ".nexus-library"),
+ tokenSecret: getBrainConfig().tokenSecret,
+ };
+
+ return cachedConfig;
+}
+
+export function resetLibraryConfigCache(): void {
+ cachedConfig = null;
+}
diff --git a/src/lib/library-store/export.ts b/src/lib/library-store/export.ts
new file mode 100644
index 0000000..9bee8aa
--- /dev/null
+++ b/src/lib/library-store/export.ts
@@ -0,0 +1,214 @@
+import JSZip from "jszip";
+import { customAlphabet } from "nanoid";
+import { getLibraryStore } from "./store";
+import { computeContentHash } from "./hashing";
+import { artifactDocumentKey, buildResolverKey } from "./resolver";
+import type { SkillBundle } from "./types";
+
+const nanoid = customAlphabet("abcdefghijklmnopqrstuvwxyz0123456789", 12);
+
/** Top-level manifest.json written into a `.nexus` archive. */
export interface NexusArchiveManifest {
  schemaVersion: 1;
  workflowName: string;
  createdAt: string;
  createdBy: string;
  // This exporter only produces artifact-mode resolution data.
  resolverMode: "artifact";
  // One entry per distinct (scope, packId, packVersion) referenced.
  packs: Array<{
    packId: string;
    packKey: string;
    packVersion: string;
    scope: "workspace" | "user";
    name: string;
    skillIds: string[];
  }>;
  // One entry per resolved skill bundle.
  skills: Array<{
    skillId: string;
    skillKey: string;
    name: string;
    packId: string;
    packVersion: string;
    scope: "workspace" | "user";
  }>;
}

/** Inputs for buildNexusArchive. */
export interface BuildArchiveInput {
  workflowJson: unknown;
  workflowName: string;
  createdBy?: string;
}

/** A skill reference extracted from `libraryRef` objects in workflow JSON. */
interface ResolvedRef {
  scope: "workspace" | "user";
  packId: string;
  packVersion: string;
  skillId: string;
}
+
+function collectSkillRefs(workflowJson: unknown): ResolvedRef[] {
+ if (!workflowJson || typeof workflowJson !== "object") return [];
+ const refs: ResolvedRef[] = [];
+ const seen = new Set();
+
+ function visit(value: unknown): void {
+ if (Array.isArray(value)) {
+ for (const v of value) visit(v);
+ return;
+ }
+ if (!value || typeof value !== "object") return;
+ const obj = value as Record;
+ if (
+ obj.libraryRef &&
+ typeof obj.libraryRef === "object" &&
+ obj.libraryRef !== null
+ ) {
+ const ref = obj.libraryRef as Record;
+ if (
+ typeof ref.scope === "string" &&
+ typeof ref.packId === "string" &&
+ typeof ref.packVersion === "string" &&
+ typeof ref.skillId === "string" &&
+ (ref.scope === "workspace" || ref.scope === "user")
+ ) {
+ const key = `${ref.scope}:${ref.packId}:${ref.packVersion}:${ref.skillId}`;
+ if (!seen.has(key)) {
+ seen.add(key);
+ refs.push({
+ scope: ref.scope as "workspace" | "user",
+ packId: ref.packId,
+ packVersion: ref.packVersion,
+ skillId: ref.skillId,
+ });
+ }
+ }
+ }
+ for (const v of Object.values(obj)) visit(v);
+ }
+
+ visit(workflowJson);
+ return refs;
+}
+
+export async function buildNexusArchive(input: BuildArchiveInput): Promise<{ buffer: Buffer; archiveName: string }> {
+ const store = getLibraryStore();
+ const refs = collectSkillRefs(input.workflowJson);
+ const bundles: SkillBundle[] = [];
+ for (const ref of refs) {
+ const bundle = await store.resolveLive(ref);
+ if (bundle) bundles.push(bundle);
+ }
+
+ const zip = new JSZip();
+ const hashes: Record = {};
+ const resolverMetadata: Record> = {};
+ const fileContents = new Map();
+
+ const workflowJsonText = JSON.stringify(input.workflowJson, null, 2);
+ zip.file("workflow.json", workflowJsonText);
+ hashes["workflow.json"] = computeContentHash(workflowJsonText);
+ fileContents.set("workflow.json", workflowJsonText);
+
+ const archiveManifest: NexusArchiveManifest = {
+ schemaVersion: 1,
+ workflowName: input.workflowName,
+ createdAt: new Date().toISOString(),
+ createdBy: input.createdBy ?? "",
+ resolverMode: "artifact",
+ packs: [],
+ skills: [],
+ };
+
+ const packAdded = new Set();
+ for (const bundle of bundles) {
+ const packKey = `${bundle.scope}:${bundle.packId}:${bundle.packVersion}`;
+ if (!packAdded.has(packKey)) {
+ packAdded.add(packKey);
+ archiveManifest.packs.push({
+ packId: bundle.packId,
+ packKey: bundle.packKey,
+ packVersion: bundle.packVersion,
+ scope: bundle.scope,
+ name: bundle.skillName,
+ skillIds: [],
+ });
+ const packManifestPath = `libraries/${bundle.scope}/packs/${bundle.packKey}/manifest.json`;
+ const packManifest = {
+ schemaVersion: 1,
+ packId: bundle.packId,
+ packKey: bundle.packKey,
+ version: bundle.packVersion,
+ scope: bundle.scope,
+ };
+ const packManifestText = JSON.stringify(packManifest, null, 2);
+ zip.file(packManifestPath, packManifestText);
+ hashes[packManifestPath] = computeContentHash(packManifestText);
+ fileContents.set(packManifestPath, packManifestText);
+ }
+
+ archiveManifest.packs.find((p) => p.packId === bundle.packId && p.packVersion === bundle.packVersion)?.skillIds.push(bundle.skillId);
+ archiveManifest.skills.push({
+ skillId: bundle.skillId,
+ skillKey: bundle.skillKey,
+ name: bundle.skillName,
+ packId: bundle.packId,
+ packVersion: bundle.packVersion,
+ scope: bundle.scope,
+ });
+
+ const entrypointPath = artifactDocumentKey(bundle.scope, bundle.packKey, bundle.skillKey, "SKILL.md");
+ zip.file(entrypointPath, bundle.entrypoint.content);
+ hashes[entrypointPath] = computeContentHash(bundle.entrypoint.content);
+ fileContents.set(entrypointPath, bundle.entrypoint.content);
+
+ const documentPaths: string[] = [];
+ for (const doc of bundle.documents) {
+ const docKey = artifactDocumentKey(bundle.scope, bundle.packKey, bundle.skillKey, doc.path);
+ zip.file(docKey, doc.content);
+ hashes[docKey] = computeContentHash(doc.content);
+ fileContents.set(docKey, doc.content);
+ documentPaths.push(doc.path);
+ }
+
+ resolverMetadata[buildResolverKey(bundle.scope, bundle.packId, bundle.packVersion, bundle.skillId)] = buildResolverEntry(bundle, documentPaths);
+ }
+
+ const manifestText = JSON.stringify(archiveManifest, null, 2);
+ zip.file("manifest.json", manifestText);
+ hashes["manifest.json"] = computeContentHash(manifestText);
+
+ const resolverText = JSON.stringify({ schemaVersion: 1, entries: resolverMetadata }, null, 2);
+ zip.file("runtime/resolver-metadata.json", resolverText);
+ hashes["runtime/resolver-metadata.json"] = computeContentHash(resolverText);
+
+ const hashesText = JSON.stringify(hashes, null, 2);
+ zip.file("hashes.json", hashesText);
+
+ for (const [path, expected] of Object.entries(hashes)) {
+ if (path === "hashes.json") continue;
+ const actual = fileContents.has(path)
+ ? computeContentHash(fileContents.get(path)!)
+ : expected;
+ if (actual !== expected) {
+ throw new Error(`Integrity check failed for ${path}`);
+ }
+ }
+
+ const buffer = await zip.generateAsync({ type: "nodebuffer" });
+ const archiveName = input.workflowName.replace(/[^a-zA-Z0-9_\-]/g, "-").toLowerCase() || "workflow";
+ return { buffer, archiveName: `${archiveName}-${nanoid()}.nexus` };
+}
+
+function buildResolverEntry(bundle: SkillBundle, documentPaths: string[]) {
+ return {
+ scope: bundle.scope,
+ packId: bundle.packId,
+ packKey: bundle.packKey,
+ packVersion: bundle.packVersion,
+ skillId: bundle.skillId,
+ skillKey: bundle.skillKey,
+ skillName: bundle.skillName,
+ description: bundle.description,
+ entrypointPath: "SKILL.md",
+ documentPaths,
+ manifestHash: bundle.manifestHash,
+ };
+}
diff --git a/src/lib/library-store/hashing.ts b/src/lib/library-store/hashing.ts
new file mode 100644
index 0000000..91ce466
--- /dev/null
+++ b/src/lib/library-store/hashing.ts
@@ -0,0 +1,23 @@
+import { createHash } from "node:crypto";
+
+export function sha256(content: string | Buffer | Uint8Array): string {
+ const hash = createHash("sha256");
+ if (typeof content === "string") {
+ hash.update(content, "utf8");
+ } else {
+ hash.update(Buffer.isBuffer(content) ? content : Buffer.from(content));
+ }
+ return hash.digest("hex");
+}
+
/**
 * Canonical content hash used across the library store (hashes.json entries,
 * document content hashes). Currently SHA-256 hex; kept as a named wrapper so
 * the algorithm can change in one place.
 */
export function computeContentHash(content: string | Buffer | Uint8Array): string {
  return sha256(content);
}
+
+export function buildHashManifest(entries: Record): Record {
+ const manifest: Record = {};
+ for (const [path, value] of Object.entries(entries)) {
+ manifest[path] = sha256(value);
+ }
+ return manifest;
+}
diff --git a/src/lib/library-store/import.ts b/src/lib/library-store/import.ts
new file mode 100644
index 0000000..702982e
--- /dev/null
+++ b/src/lib/library-store/import.ts
@@ -0,0 +1,212 @@
+import JSZip from "jszip";
+import { computeContentHash } from "./hashing";
+import { getLibraryStore } from "./store";
+import type { LibraryScope, PackRecord } from "./types";
+
/** Inputs for importing a `.nexus` archive into a workspace/user library. */
export interface ImportNexusInput {
  // Raw archive bytes (zip).
  buffer: Buffer | ArrayBuffer | Uint8Array;
  workspaceId: string;
  ownerUserId?: string | null;
  // Target library scope; defaults to "workspace".
  scope?: LibraryScope;
  createdBy?: string;
}

/** Result of an archive import: the packs that were created. */
export interface ImportResult {
  packs: PackRecord[];
}

// Shape of manifest.json inside a `.nexus` archive (mirrors the exporter's
// NexusArchiveManifest, minus fields the importer does not read).
interface ArchiveManifest {
  schemaVersion: number;
  workflowName: string;
  packs: Array<{
    packId: string;
    packKey: string;
    packVersion: string;
    scope: "workspace" | "user";
    name: string;
    skillIds: string[];
  }>;
  skills: Array<{
    skillId: string;
    skillKey: string;
    name: string;
    packId: string;
    packVersion: string;
    scope: "workspace" | "user";
  }>;
}
+
+export async function importNexusArchive(input: ImportNexusInput): Promise {
+ const zip = await JSZip.loadAsync(input.buffer as Buffer);
+ const manifestRaw = await zip.file("manifest.json")?.async("string");
+ if (!manifestRaw) throw new Error("Archive missing manifest.json");
+ const manifest = JSON.parse(manifestRaw) as ArchiveManifest;
+
+ const hashesRaw = await zip.file("hashes.json")?.async("string");
+ if (hashesRaw) {
+ const hashes = JSON.parse(hashesRaw) as Record;
+ for (const [path, expected] of Object.entries(hashes)) {
+ const file = zip.file(path);
+ if (!file) throw new Error(`Archive missing referenced file ${path}`);
+ const content = await file.async("string");
+ const actual = computeContentHash(content);
+ if (actual !== expected) {
+ throw new Error(`Hash mismatch for ${path}`);
+ }
+ }
+ }
+
+ const store = getLibraryStore();
+ const { workspace, user } = await store.ensureLibraries(input.workspaceId, input.ownerUserId ?? null);
+ const targetLibrary = (input.scope ?? "workspace") === "user" && user ? user : workspace;
+
+ const result: ImportResult = { packs: [] };
+
+ for (const packEntry of manifest.packs) {
+ let packKey = packEntry.packKey;
+ let suffix = 1;
+ const existingKeys = (await store.listPacks(targetLibrary.id, { includeDeleted: true })).map((p) => p.packKey);
+ while (existingKeys.includes(packKey)) {
+ packKey = `${packEntry.packKey}-imported-${suffix++}`;
+ }
+
+ const pack = await store.createPack(targetLibrary.id, {
+ packKey,
+ name: packEntry.name || packEntry.packKey,
+ description: `Imported from ${manifest.workflowName}`,
+ tags: [],
+ createdBy: input.createdBy ?? "",
+ metadata: { external: true, originalPackId: packEntry.packId, originalPackVersion: packEntry.packVersion },
+ });
+
+ const skillsForPack = manifest.skills.filter((s) => s.packId === packEntry.packId && s.packVersion === packEntry.packVersion);
+ for (const skillEntry of skillsForPack) {
+ const skillFolderPrefix = `libraries/${packEntry.scope}/packs/${packEntry.packKey}/skills/${skillEntry.skillKey}/`;
+ const entrypointFile = zip.file(`${skillFolderPrefix}SKILL.md`);
+ if (!entrypointFile) continue;
+ const entrypointContent = await entrypointFile.async("string");
+
+ const { document } = await store.createDocument(pack.id, {
+ role: "skill-entrypoint",
+ path: "SKILL.md",
+ content: entrypointContent,
+ createdBy: input.createdBy ?? "",
+ message: "import",
+ });
+
+ await store.createSkill(pack.id, {
+ skillKey: skillEntry.skillKey,
+ name: skillEntry.name,
+ description: "",
+ entrypointDocId: document.id,
+ createdBy: input.createdBy ?? "",
+ });
+ }
+
+ const packPrefix = `libraries/${packEntry.scope}/packs/${packEntry.packKey}/`;
+ for (const filename of Object.keys(zip.files)) {
+ if (!filename.startsWith(packPrefix)) continue;
+ if (filename.endsWith("/")) continue;
+ if (filename === `${packPrefix}manifest.json`) continue;
+ if (filename.includes("/skills/")) continue;
+ const content = await zip.file(filename)!.async("string");
+ const relPath = filename.slice(packPrefix.length);
+ const role = inferRole(relPath);
+ await store.createDocument(pack.id, {
+ role,
+ path: relPath,
+ content,
+ createdBy: input.createdBy ?? "",
+ message: "import",
+ });
+ }
+
+ result.packs.push(pack);
+ }
+
+ return result;
+}
+
+function inferRole(relPath: string): "doc" | "rule" | "asset" | "template" | "example" | "reference" {
+ if (relPath.startsWith("rules/")) return "rule";
+ if (relPath.startsWith("docs/")) return "doc";
+ if (relPath.startsWith("assets/")) return "asset";
+ if (relPath.startsWith("templates/")) return "template";
+ if (relPath.startsWith("examples/")) return "example";
+ if (relPath.startsWith("references/")) return "reference";
+ return "doc";
+}
+
+export interface ImportAgentSkillsInput {
+ buffer: Buffer | ArrayBuffer | Uint8Array;
+ workspaceId: string;
+ ownerUserId?: string | null;
+ scope?: LibraryScope;
+ packKey: string;
+ packName?: string;
+ createdBy?: string;
+}
+
+export async function importAgentSkillsFolder(input: ImportAgentSkillsInput): Promise {
+ const zip = await JSZip.loadAsync(input.buffer as Buffer);
+ const store = getLibraryStore();
+ const { workspace, user } = await store.ensureLibraries(input.workspaceId, input.ownerUserId ?? null);
+ const targetLibrary = (input.scope ?? "user") === "user" && user ? user : workspace;
+
+ let packKey = input.packKey;
+ const existingKeys = (await store.listPacks(targetLibrary.id, { includeDeleted: true })).map((p) => p.packKey);
+ let suffix = 1;
+ while (existingKeys.includes(packKey)) {
+ packKey = `${input.packKey}-imported-${suffix++}`;
+ }
+ const pack = await store.createPack(targetLibrary.id, {
+ packKey,
+ name: input.packName ?? input.packKey,
+ description: "",
+ tags: [],
+ createdBy: input.createdBy ?? "",
+ metadata: { external: true },
+ });
+
+ const skillFiles = Object.keys(zip.files).filter((f) => f.endsWith("SKILL.md") && !zip.files[f].dir);
+ for (const skillFile of skillFiles) {
+ const content = await zip.file(skillFile)!.async("string");
+ const folder = skillFile.replace(/\/?SKILL\.md$/, "");
+ const skillKey = folder.split("/").filter(Boolean).pop() ?? "skill";
+ const sanitizedKey = skillKey.toLowerCase().replace(/[^a-z0-9\-]/g, "-").replace(/^-+|-+$/g, "") || "skill";
+
+ const { document } = await store.createDocument(pack.id, {
+ role: "skill-entrypoint",
+ path: `${sanitizedKey}/SKILL.md`,
+ content,
+ createdBy: input.createdBy ?? "",
+ message: "import-agent-skill",
+ });
+
+ await store.createSkill(pack.id, {
+ skillKey: sanitizedKey,
+ name: sanitizedKey,
+ description: "",
+ entrypointDocId: document.id,
+ createdBy: input.createdBy ?? "",
+ });
+
+ const folderPrefix = folder ? `${folder}/` : "";
+ for (const otherFile of Object.keys(zip.files)) {
+ if (otherFile === skillFile) continue;
+ if (folderPrefix && !otherFile.startsWith(folderPrefix)) continue;
+ if (zip.files[otherFile].dir) continue;
+ const otherContent = await zip.file(otherFile)!.async("string");
+ const rel = folderPrefix ? otherFile.slice(folderPrefix.length) : otherFile;
+ await store.createDocument(pack.id, {
+ role: rel.startsWith("references/") ? "reference" : "doc",
+ path: `${sanitizedKey}/${rel}`,
+ content: otherContent,
+ createdBy: input.createdBy ?? "",
+ message: "import-agent-skill",
+ });
+ }
+ }
+
+ return { packs: [pack] };
+}
diff --git a/src/lib/library-store/index.ts b/src/lib/library-store/index.ts
new file mode 100644
index 0000000..ac00c6a
--- /dev/null
+++ b/src/lib/library-store/index.ts
@@ -0,0 +1,48 @@
+export * from "./types";
+export {
+ LibraryStore,
+ getLibraryStore,
+ resetLibraryStoreForTests,
+ StaleVersionError,
+ NotFoundError,
+ ValidationError,
+ parseSkillFrontmatter,
+} from "./store";
+export { threeWayTextMerge } from "./merge";
+export type { ThreeWayMergeResult, MergeConflict, MergeConflictType } from "./merge";
+export { buildManifest } from "./manifest";
+export { validatePack, parseFrontmatter } from "./validation";
+export { resolveLive, resolveFromArtifact, buildResolverKey, artifactDocumentKey } from "./resolver";
+export type { ArtifactResolverData } from "./resolver";
+export { buildNexusArchive } from "./export";
+export type { NexusArchiveManifest, BuildArchiveInput } from "./export";
+export { importNexusArchive, importAgentSkillsFolder } from "./import";
+export type { ImportNexusInput, ImportResult, ImportAgentSkillsInput } from "./import";
+export { computeContentHash, sha256, buildHashManifest } from "./hashing";
+export { getLibraryConfig, resetLibraryConfigCache } from "./config";
+export { OBJECT_KEYS, FilesystemObjectStorage } from "./object-store";
+export type { ObjectStorage } from "./object-store";
+export {
+ manifestSchemaV1,
+ libraryScopeSchema,
+ documentRoleSchema,
+ skillFrontmatterSchema,
+ semverSchema,
+ createPackSchema,
+ updatePackSchema,
+ forkPackSchema,
+ createDocumentSchema,
+ updateDocumentSchema,
+ saveDocumentVersionSchema,
+ createSkillSchema,
+ updateSkillSchema,
+ publishPackSchema,
+ publishSkillSchema,
+ mergeBaseSchema,
+ resolveConflictSchema,
+ resolveLiveSchema,
+ exportRequestSchema,
+ importRequestSchema,
+ sessionRequestSchema,
+} from "./schemas";
+export type { ManifestSchemaV1 } from "./schemas";
diff --git a/src/lib/library-store/manifest.ts b/src/lib/library-store/manifest.ts
new file mode 100644
index 0000000..0ae65fd
--- /dev/null
+++ b/src/lib/library-store/manifest.ts
@@ -0,0 +1,60 @@
+import type {
+ LibraryDocumentRecord,
+ PackRecord,
+ SkillRecord,
+} from "./types";
+import type { ManifestSchemaV1 } from "./schemas";
+
+export interface BuildManifestInput {
+ pack: PackRecord;
+ skills: SkillRecord[];
+ documents: LibraryDocumentRecord[];
+ scope: "workspace" | "user";
+ version?: string;
+}
+
+export function buildManifest(input: BuildManifestInput): ManifestSchemaV1 {
+ const { pack, skills, documents, scope, version } = input;
+ const activeDocuments = documents.filter((d) => d.deletedAt === null);
+ const activeSkills = skills.filter((s) => s.deletedAt === null);
+ const docsByRole = activeDocuments.reduce>((acc, d) => {
+ (acc[d.role] ??= []).push(d);
+ return acc;
+ }, {});
+ const docById = new Map(activeDocuments.map((d) => [d.id, d]));
+
+ const skillsMap: ManifestSchemaV1["skills"] = {};
+ for (const skill of activeSkills) {
+ const entrypoint = docById.get(skill.entrypointDocId);
+ if (!entrypoint) continue;
+ skillsMap[skill.skillKey] = {
+ skillId: skill.id,
+ skillKey: skill.skillKey,
+ name: skill.name,
+ description: skill.description,
+ entrypoint: entrypoint.path,
+ documents: (docsByRole["reference"] ?? []).map((d) => d.path),
+ rules: (docsByRole["rule"] ?? []).map((d) => d.path),
+ };
+ }
+
+ return {
+ schemaVersion: 1,
+ packId: pack.id,
+ packKey: pack.packKey,
+ name: pack.name,
+ description: pack.description,
+ version: version ?? "draft",
+ scope,
+ skills: skillsMap,
+ docs: (docsByRole["doc"] ?? []).map((d) => d.path),
+ rules: (docsByRole["rule"] ?? []).map((d) => d.path),
+ assets: (docsByRole["asset"] ?? []).map((d) => d.path),
+ templates: (docsByRole["template"] ?? []).map((d) => d.path),
+ examples: (docsByRole["example"] ?? []).map((d) => d.path),
+ external: pack.external,
+ basePackId: pack.basePackId,
+ createdAt: pack.createdAt,
+ updatedAt: pack.updatedAt,
+ };
+}
diff --git a/src/lib/library-store/merge.ts b/src/lib/library-store/merge.ts
new file mode 100644
index 0000000..60dcd40
--- /dev/null
+++ b/src/lib/library-store/merge.ts
@@ -0,0 +1,224 @@
// Classification of a conflict produced by threeWayTextMerge.
export type MergeConflictType = "text_conflict" | "delete_edit" | "add_add";

// One unresolved conflict. As produced by threeWayTextMerge, `base` carries
// the incoming ("theirs") text and `branch` the local ("yours") text;
// `ancestor` is the common-ancestor text.
export interface MergeConflict {
  conflictType: MergeConflictType;
  ancestor: string | null;
  base: string | null;
  branch: string | null;
}

// Result of a three-way merge: merged content (with conflict markers when
// not clean), the conflicts found, and whether the merge applied cleanly.
export interface ThreeWayMergeResult {
  content: string;
  conflicts: MergeConflict[];
  cleanlyMerged: boolean;
}
+
+function splitLines(text: string): string[] {
+ if (text === "") return [];
+ return text.split(/\r?\n/);
+}
+
+function joinLines(lines: string[]): string {
+ return lines.join("\n");
+}
+
+function lcs(a: string[], b: string[]): number[][] {
+ const m = a.length;
+ const n = b.length;
+ const dp: number[][] = Array.from({ length: m + 1 }, () => new Array(n + 1).fill(0));
+ for (let i = m - 1; i >= 0; i--) {
+ for (let j = n - 1; j >= 0; j--) {
+ if (a[i] === b[j]) {
+ dp[i][j] = dp[i + 1][j + 1] + 1;
+ } else {
+ dp[i][j] = Math.max(dp[i + 1][j], dp[i][j + 1]);
+ }
+ }
+ }
+ return dp;
+}
+
// One diff operation over ancestor[ancestorStart..ancestorEnd) and
// branch[branchStart..branchEnd) line ranges (end-exclusive).
interface DiffOp {
  type: "equal" | "insert" | "delete" | "replace";
  ancestorStart: number;
  ancestorEnd: number;
  branchStart: number;
  branchEnd: number;
}

// Line diff of `a` (ancestor) against `b` (branch), walked off the LCS table.
// Adjacent non-matching lines are coalesced into one insert/delete/replace
// op; matching runs become "equal" ops.
function diff(a: string[], b: string[]): DiffOp[] {
  const dp = lcs(a, b);
  const ops: DiffOp[] = [];
  let i = 0;
  let j = 0;
  let ancestorStart = 0;
  let branchStart = 0;
  let currentType: "equal" | "diff" | null = null;

  // Closes the currently-open run, ending at (aEnd, bEnd).
  // NOTE(review): the `type` argument is ignored (`void type` below); the op
  // pushed is derived from `currentType` plus the run lengths — confirm the
  // parameter is intentionally vestigial.
  const flush = (type: "equal" | "diff", aEnd: number, bEnd: number): void => {
    if (currentType === null) return;
    if (currentType === "equal") {
      ops.push({ type: "equal", ancestorStart, ancestorEnd: aEnd, branchStart, branchEnd: bEnd });
    } else {
      // Classify the diff run by which side consumed lines.
      const aLen = aEnd - ancestorStart;
      const bLen = bEnd - branchStart;
      let opType: "insert" | "delete" | "replace";
      if (aLen === 0) opType = "insert";
      else if (bLen === 0) opType = "delete";
      else opType = "replace";
      ops.push({ type: opType, ancestorStart, ancestorEnd: aEnd, branchStart, branchEnd: bEnd });
    }
    void type;
  };

  while (i < a.length || j < b.length) {
    if (i < a.length && j < b.length && a[i] === b[j]) {
      // Lines match: start (or extend) an "equal" run.
      if (currentType !== "equal") {
        flush("equal", i, j);
        currentType = "equal";
        ancestorStart = i;
        branchStart = j;
      }
      i++;
      j++;
    } else if (j < b.length && (i >= a.length || dp[i][j + 1] >= dp[i + 1][j])) {
      // Consuming a branch line keeps the LCS at least as long: insertion.
      if (currentType !== "diff") {
        flush("diff", i, j);
        currentType = "diff";
        ancestorStart = i;
        branchStart = j;
      }
      j++;
    } else {
      // Otherwise consume an ancestor line: deletion.
      if (currentType !== "diff") {
        flush("diff", i, j);
        currentType = "diff";
        ancestorStart = i;
        branchStart = j;
      }
      i++;
    }
  }
  // Close the final open run.
  flush(currentType === "equal" ? "equal" : "diff", i, j);
  return ops;
}
+
/**
 * Line-based three-way merge of two derivatives of a common ancestor.
 * `theirs` is the incoming/base side, `yours` the local side.
 *
 * Fast paths: identical sides, or exactly one side changed, merge trivially.
 * Otherwise both sides are diffed against the ancestor; if their edits touch
 * the same ancestor lines, or both replace the same position with different
 * text, the WHOLE document is returned as one conflict block with
 * "<<<<<<< yours / ======= / >>>>>>> theirs" markers (conflicts are not
 * localized to the overlapping region). Non-overlapping edits are stitched
 * together line by line.
 */
export function threeWayTextMerge(
  ancestor: string,
  theirs: string,
  yours: string,
): ThreeWayMergeResult {
  const ancestorLines = splitLines(ancestor);
  const theirsLines = splitLines(theirs);
  const yoursLines = splitLines(yours);

  // Both sides ended up identical: nothing to merge.
  if (theirs === yours) {
    return { content: theirs, conflicts: [], cleanlyMerged: true };
  }

  // Only one side changed from the ancestor: take the changed side verbatim.
  if (ancestor === theirs) {
    return { content: yours, conflicts: [], cleanlyMerged: true };
  }
  if (ancestor === yours) {
    return { content: theirs, conflicts: [], cleanlyMerged: true };
  }

  const theirsOps = diff(ancestorLines, theirsLines);
  const yoursOps = diff(ancestorLines, yoursLines);

  // Per-side bookkeeping: which ancestor lines each side touched, and each
  // side's replacement lines keyed by the ancestor index where the edit
  // starts (pure insertions are keyed at their insertion point).
  const ancestorLen = ancestorLines.length;
  const theirsChanged = new Array(ancestorLen).fill(false);
  const yoursChanged = new Array(ancestorLen).fill(false);

  const theirsReplacement = new Map();
  const yoursReplacement = new Map();

  for (const op of theirsOps) {
    if (op.type === "equal") continue;
    const repl = theirsLines.slice(op.branchStart, op.branchEnd);
    theirsReplacement.set(op.ancestorStart, repl);
    // NOTE(review): for a pure insertion this marks the ancestor line AT the
    // insertion point as changed (and may index ancestorLen, one past the
    // array) — confirm that adjacency is meant to count as overlap.
    if (op.ancestorEnd === op.ancestorStart) {
      theirsChanged[op.ancestorStart] = true;
    }
    for (let k = op.ancestorStart; k < op.ancestorEnd; k++) {
      theirsChanged[k] = true;
    }
  }
  for (const op of yoursOps) {
    if (op.type === "equal") continue;
    const repl = yoursLines.slice(op.branchStart, op.branchEnd);
    yoursReplacement.set(op.ancestorStart, repl);
    if (op.ancestorEnd === op.ancestorStart) {
      yoursChanged[op.ancestorStart] = true;
    }
    for (let k = op.ancestorStart; k < op.ancestorEnd; k++) {
      yoursChanged[k] = true;
    }
  }

  // Conflict detection: (1) both sides touched the same ancestor line, or
  // (2) both sides start a replacement at the same index with different text.
  const conflicts: MergeConflict[] = [];
  let hasConflict = false;
  for (let k = 0; k < ancestorLen; k++) {
    if (theirsChanged[k] && yoursChanged[k]) {
      hasConflict = true;
      break;
    }
  }
  for (const start of theirsReplacement.keys()) {
    if (yoursReplacement.has(start)) {
      const t = theirsReplacement.get(start)!;
      const y = yoursReplacement.get(start)!;
      if (t.join("\n") !== y.join("\n")) {
        hasConflict = true;
      }
    }
  }

  if (hasConflict) {
    // Whole-document conflict block; `base` = theirs, `branch` = yours.
    const block = [
      "<<<<<<< yours",
      yours,
      "=======",
      theirs,
      ">>>>>>> theirs",
    ].join("\n");
    conflicts.push({
      conflictType: ancestor === "" ? "add_add" : "text_conflict",
      ancestor,
      base: theirs,
      branch: yours,
    });
    return { content: block, conflicts, cleanlyMerged: false };
  }

  // Clean merge: walk the ancestor, substituting whichever side replaced
  // each position (at most one side can have, since there is no conflict).
  const result: string[] = [];
  for (let k = 0; k < ancestorLen; k++) {
    if (theirsReplacement.has(k)) {
      const repl = theirsReplacement.get(k)!;
      result.push(...repl);
      // Skip the ancestor lines this replacement consumed.
      const op = theirsOps.find((o) => o.ancestorStart === k);
      if (op && op.ancestorEnd > op.ancestorStart) {
        k = op.ancestorEnd - 1;
        continue;
      }
    } else if (yoursReplacement.has(k)) {
      const repl = yoursReplacement.get(k)!;
      result.push(...repl);
      const op = yoursOps.find((o) => o.ancestorStart === k);
      if (op && op.ancestorEnd > op.ancestorStart) {
        k = op.ancestorEnd - 1;
        continue;
      }
    } else {
      result.push(ancestorLines[k]);
    }
  }
  // Edits keyed at ancestorLen are appends past the last ancestor line.
  if (theirsReplacement.has(ancestorLen)) {
    result.push(...theirsReplacement.get(ancestorLen)!);
  }
  if (yoursReplacement.has(ancestorLen) && !theirsReplacement.has(ancestorLen)) {
    result.push(...yoursReplacement.get(ancestorLen)!);
  }

  return { content: joinLines(result), conflicts: [], cleanlyMerged: true };
}
diff --git a/src/lib/library-store/object-store.ts b/src/lib/library-store/object-store.ts
new file mode 100644
index 0000000..0a6c74b
--- /dev/null
+++ b/src/lib/library-store/object-store.ts
@@ -0,0 +1,126 @@
+import fs from "node:fs/promises";
+import path from "node:path";
+
+export interface ObjectStorage {
+ putObject(key: string, value: Buffer | string, options?: { contentType?: string; immutable?: boolean }): Promise;
+ getObject(key: string): Promise;
+ getObjectAsString(key: string): Promise;
+ deleteObject(key: string): Promise;
+ objectExists(key: string): Promise;
+ listKeys(prefix: string): Promise;
+}
+
+async function ensureDir(dir: string): Promise {
+ await fs.mkdir(dir, { recursive: true });
+}
+
+async function atomicWrite(filePath: string, content: Buffer | string): Promise {
+ const tempPath = `${filePath}.tmp`;
+ await ensureDir(path.dirname(filePath));
+ await fs.writeFile(tempPath, content);
+ await fs.rename(tempPath, filePath);
+}
+
+function isImmutableKey(key: string): boolean {
+ return /\/versions\//.test(key) || key.startsWith("exports/");
+}
+
+export class FilesystemObjectStorage implements ObjectStorage {
+ constructor(private readonly dataDir: string) {}
+
+ private resolveKey(key: string): string {
+ const normalized = key.replace(/^\/+/, "");
+ if (normalized.includes("..")) {
+ throw new Error(`Invalid object key: ${key}`);
+ }
+ return path.join(this.dataDir, "objects", normalized);
+ }
+
+ async putObject(key: string, value: Buffer | string, options?: { immutable?: boolean }): Promise {
+ const filePath = this.resolveKey(key);
+ const immutable = options?.immutable ?? isImmutableKey(key);
+
+ if (immutable) {
+ try {
+ await fs.access(filePath);
+ return;
+ } catch {
+ }
+ }
+
+ await atomicWrite(filePath, value);
+ }
+
+ async getObject(key: string): Promise {
+ try {
+ return await fs.readFile(this.resolveKey(key));
+ } catch {
+ return null;
+ }
+ }
+
+ async getObjectAsString(key: string): Promise {
+ try {
+ return await fs.readFile(this.resolveKey(key), "utf8");
+ } catch {
+ return null;
+ }
+ }
+
+ async deleteObject(key: string): Promise {
+ try {
+ await fs.unlink(this.resolveKey(key));
+ } catch {
+ }
+ }
+
+ async objectExists(key: string): Promise {
+ try {
+ await fs.access(this.resolveKey(key));
+ return true;
+ } catch {
+ return false;
+ }
+ }
+
+ async listKeys(prefix: string): Promise {
+ const root = this.resolveKey(prefix);
+ const result: string[] = [];
+
+ async function walk(dir: string, base: string): Promise {
+ let entries: { name: string; isDirectory: () => boolean }[];
+ try {
+ entries = await fs.readdir(dir, { withFileTypes: true });
+ } catch {
+ return;
+ }
+ for (const entry of entries) {
+ const name = entry.name as string;
+ const full = path.join(dir, name);
+ const rel = path.posix.join(base, name);
+ if (entry.isDirectory()) {
+ await walk(full, rel);
+ } else {
+ result.push(rel);
+ }
+ }
+ }
+
+ await walk(root, prefix.replace(/^\/+/, ""));
+ return result;
+ }
+}
+
+export interface ObjectKeys {
+ documentVersionContent(docId: string, versionId: string): string;
+ documentVersionMetadata(docId: string, versionId: string): string;
+ packVersionManifest(packId: string, versionId: string): string;
+ exportArchive(exportId: string): string;
+}
+
+export const OBJECT_KEYS: ObjectKeys = {
+ documentVersionContent: (docId, versionId) => `documents/${docId}/versions/${versionId}/content.md`,
+ documentVersionMetadata: (docId, versionId) => `documents/${docId}/versions/${versionId}/metadata.json`,
+ packVersionManifest: (packId, versionId) => `packs/${packId}/versions/${versionId}/manifest.json`,
+ exportArchive: (exportId) => `exports/${exportId}/workflow-export.nexus`,
+};
diff --git a/src/lib/library-store/resolver.ts b/src/lib/library-store/resolver.ts
new file mode 100644
index 0000000..082dfe3
--- /dev/null
+++ b/src/lib/library-store/resolver.ts
@@ -0,0 +1,90 @@
+import { getLibraryStore } from "./store";
+import type { ResolveLiveInput, SkillBundle, SkillBundleDocument } from "./types";
+
+export async function resolveLive(input: ResolveLiveInput): Promise {
+ return getLibraryStore().resolveLive(input);
+}
+
+export interface ArtifactResolverData {
+ manifest: {
+ schemaVersion: number;
+ packs: Array<{
+ packId: string;
+ packKey: string;
+ packVersion: string;
+ scope: string;
+ }>;
+ };
+ resolverMetadata: Record;
+ files: Map;
+}
+
/**
 * Resolve a skill bundle from pre-extracted artifact data (no store access).
 * Returns null when the skill is absent from the artifact's resolver metadata
 * or its entrypoint file is missing from the file map.
 */
export function resolveFromArtifact(input: ResolveLiveInput, artifact: ArtifactResolverData): SkillBundle | null {
  const key = `${input.scope}:${input.packId}:${input.packVersion}:${input.skillId}`;
  const meta = artifact.resolverMetadata[key];
  if (!meta) return null;
  const entrypointKey = artifactDocumentKey(meta.scope, meta.packKey, meta.skillKey, meta.entrypointPath);
  const entrypointContent = artifact.files.get(entrypointKey);
  if (entrypointContent === undefined) return null;

  // NOTE(review): contentHash is left empty ("") for artifact-resolved documents;
  // hashes appear to be tracked only for store-backed versions — confirm consumers accept this.
  const entrypoint: SkillBundleDocument = {
    docId: meta.entrypointPath,
    path: meta.entrypointPath,
    role: "skill-entrypoint",
    content: entrypointContent,
    contentHash: "",
  };
  const documents: SkillBundleDocument[] = [];
  for (const docPath of meta.documentPaths) {
    const fileKey = artifactDocumentKey(meta.scope, meta.packKey, meta.skillKey, docPath);
    const content = artifact.files.get(fileKey);
    // Missing supporting files are skipped rather than failing the whole bundle.
    if (content === undefined) continue;
    documents.push({
      docId: docPath,
      path: docPath,
      role: "reference",
      content,
      contentHash: "",
    });
  }

  return {
    scope: meta.scope,
    packId: meta.packId,
    packKey: meta.packKey,
    packVersion: meta.packVersion,
    skillId: meta.skillId,
    skillKey: meta.skillKey,
    skillName: meta.skillName,
    description: meta.description,
    entrypoint,
    documents,
    manifestHash: meta.manifestHash,
  };
}
+
+export function artifactDocumentKey(scope: string, packKey: string, skillKey: string, docPath: string): string {
+ if (docPath.endsWith("SKILL.md") && !docPath.includes("/")) {
+ return `libraries/${scope}/packs/${packKey}/skills/${skillKey}/${docPath}`;
+ }
+ if (docPath === "SKILL.md") {
+ return `libraries/${scope}/packs/${packKey}/skills/${skillKey}/SKILL.md`;
+ }
+ return `libraries/${scope}/packs/${packKey}/${docPath}`;
+}
+
+export function buildResolverKey(scope: string, packId: string, packVersion: string, skillId: string): string {
+ return `${scope}:${packId}:${packVersion}:${skillId}`;
+}
diff --git a/src/lib/library-store/schemas.ts b/src/lib/library-store/schemas.ts
new file mode 100644
index 0000000..eee0f65
--- /dev/null
+++ b/src/lib/library-store/schemas.ts
@@ -0,0 +1,162 @@
+import { z } from "zod/v4";
+
// Library scope: "workspace" packs are shared; "user" packs are per-user local.
export const libraryScopeSchema = z.enum(["workspace", "user"]);

// Role of a document within a pack; "skill-entrypoint" marks a skill's SKILL.md.
export const documentRoleSchema = z.enum([
  "skill-entrypoint",
  "reference",
  "doc",
  "rule",
  "template",
  "example",
  "asset",
  "script",
  "manifest",
]);

// Simplified semver: MAJOR.MINOR.PATCH plus optional pre-release/build suffixes.
// NOTE(review): looser than the full semver grammar (e.g. leading zeros pass) — confirm acceptable.
const semverRegex = /^\d+\.\d+\.\d+(-[A-Za-z0-9.\-]+)?(\+[A-Za-z0-9.\-]+)?$/;
export const semverSchema = z.string().regex(semverRegex, "Must be valid semver");
+
// YAML frontmatter expected at the top of a SKILL.md entrypoint.
export const skillFrontmatterSchema = z.object({
  name: z.string().min(1),
  description: z.string().min(1),
  compatibility: z.string().optional(),
  metadata: z.record(z.string(), z.unknown()).optional(),
});

// One skill entry inside a published pack manifest; document paths are pack-relative.
export const manifestSkillSchema = z.object({
  skillId: z.string().min(1),
  skillKey: z.string().min(1),
  name: z.string().min(1),
  description: z.string(),
  entrypoint: z.string().min(1),
  documents: z.array(z.string()).default([]),
  rules: z.array(z.string()).default([]),
});

// Immutable manifest snapshot written when a pack version is published (schema v1).
export const manifestSchemaV1 = z.object({
  schemaVersion: z.literal(1),
  packId: z.string().min(1),
  packKey: z.string().min(1),
  name: z.string().min(1),
  description: z.string().default(""),
  version: z.string(),
  scope: libraryScopeSchema,
  skills: z.record(z.string(), manifestSkillSchema),
  docs: z.array(z.string()).default([]),
  rules: z.array(z.string()).default([]),
  assets: z.array(z.string()).default([]),
  templates: z.array(z.string()).default([]),
  examples: z.array(z.string()).default([]),
  external: z.boolean().default(false),
  // Set when this pack was forked from another pack.
  basePackId: z.string().nullable().default(null),
  createdAt: z.string(),
  updatedAt: z.string(),
});
+
+export type ManifestSchemaV1 = z.infer;
+
// Request body for creating a pack; packKey must be lowercase kebab-case.
export const createPackSchema = z.object({
  scope: libraryScopeSchema,
  packKey: z.string().min(1).regex(/^[a-z0-9][a-z0-9\-]*$/, "Lowercase kebab-case"),
  name: z.string().min(1),
  description: z.string().default(""),
  tags: z.array(z.string()).default([]),
  createdBy: z.string().default(""),
  metadata: z.record(z.string(), z.unknown()).optional(),
});

// Partial pack update; all fields optional.
export const updatePackSchema = z.object({
  name: z.string().min(1).optional(),
  description: z.string().optional(),
  tags: z.array(z.string()).optional(),
  scope: libraryScopeSchema.optional(),
});

// Fork request; defaults to forking into the caller's user-local library.
export const forkPackSchema = z.object({
  targetScope: libraryScopeSchema.default("user"),
  packKey: z.string().min(1).regex(/^[a-z0-9][a-z0-9\-]*$/).optional(),
});

// Request body for creating a document inside a pack.
export const createDocumentSchema = z.object({
  role: documentRoleSchema,
  path: z.string().min(1),
  content: z.string().default(""),
  createdBy: z.string().default(""),
  message: z.string().default("create"),
  metadata: z.record(z.string(), z.unknown()).optional(),
});

// Partial document update (role and/or path).
export const updateDocumentSchema = z.object({
  role: documentRoleSchema.optional(),
  path: z.string().min(1).optional(),
});

// Save a new document version; previousVersionId enables optimistic concurrency
// (null for a document's first save through this path).
export const saveDocumentVersionSchema = z.object({
  content: z.string(),
  previousVersionId: z.string().nullable(),
  message: z.string().default(""),
  createdBy: z.string().default(""),
  metadata: z.record(z.string(), z.unknown()).optional(),
});
+
// Request body for creating a skill; skillKey must be lowercase kebab-case and
// entrypointDocId must reference an existing document in the pack.
export const createSkillSchema = z.object({
  skillKey: z.string().min(1).regex(/^[a-z0-9][a-z0-9\-]*$/),
  name: z.string().min(1),
  description: z.string().default(""),
  entrypointDocId: z.string().min(1),
  createdBy: z.string().default(""),
  metadata: z.record(z.string(), z.unknown()).optional(),
});

// Partial skill update, including deprecation toggling.
export const updateSkillSchema = z.object({
  name: z.string().min(1).optional(),
  description: z.string().optional(),
  entrypointDocId: z.string().min(1).optional(),
  deprecated: z.boolean().optional(),
});

// Publish an immutable pack version under a semver string.
export const publishPackSchema = z.object({
  version: semverSchema,
  notes: z.string().default(""),
  createdBy: z.string().default(""),
});

// Publish a skill version, optionally linked to the most recent pack version.
export const publishSkillSchema = z.object({
  version: semverSchema,
  notes: z.string().default(""),
  createdBy: z.string().default(""),
  linkToLatestPackVersion: z.boolean().default(false),
});

// Start a three-way merge of base-pack updates into a fork.
export const mergeBaseSchema = z.object({
  initiatedBy: z.string().default(""),
});

// Resolve merge conflicts by supplying final content per document id.
export const resolveConflictSchema = z.object({
  resolvedContentByDocId: z.record(z.string(), z.string()),
  resolvedBy: z.string().default(""),
});

// Live skill resolution request; packVersion is a published semver or "draft" (current head).
export const resolveLiveSchema = z.object({
  scope: libraryScopeSchema,
  packId: z.string().min(1),
  packVersion: z.union([semverSchema, z.literal("draft")]),
  skillId: z.string().min(1),
});

// Workflow export request; workflowJson is validated downstream, not here.
export const exportRequestSchema = z.object({
  workflowJson: z.unknown(),
  workflowName: z.string().default("workflow"),
  createdBy: z.string().default(""),
});

// Import request: archive format plus destination scope.
export const importRequestSchema = z.object({
  format: z.enum(["nexus", "agent-skills"]).default("nexus"),
  scope: libraryScopeSchema.default("workspace"),
});

// Session bootstrap request; both fields may be absent or null.
export const sessionRequestSchema = z.object({
  token: z.string().min(1).nullable().optional(),
  ownerUserId: z.string().nullable().optional(),
});
diff --git a/src/lib/library-store/store.ts b/src/lib/library-store/store.ts
new file mode 100644
index 0000000..9cae4a1
--- /dev/null
+++ b/src/lib/library-store/store.ts
@@ -0,0 +1,1161 @@
+import fs from "node:fs/promises";
+import path from "node:path";
+import { customAlphabet } from "nanoid";
+import { getLibraryConfig } from "./config";
+import { FilesystemObjectStorage, OBJECT_KEYS, type ObjectStorage } from "./object-store";
+import { computeContentHash } from "./hashing";
+import { threeWayTextMerge } from "./merge";
+import { buildManifest } from "./manifest";
+import { parseSkillFrontmatter, validatePack } from "./validation";
+import type {
+ BranchRecord,
+ ConflictRecord,
+ CreateDocumentInput,
+ CreatePackInput,
+ CreateSkillInput,
+ LibraryDocumentRecord,
+ LibraryDocumentVersionRecord,
+ LibraryManifest,
+ LibraryRecord,
+ MergeRecord,
+ PackRecord,
+ PackVersionDocumentRecord,
+ PackVersionRecord,
+ PublishPackVersionInput,
+ PublishSkillVersionInput,
+ ResolveLiveInput,
+ SaveDocumentVersionInput,
+ SkillBundle,
+ SkillBundleDocument,
+ SkillRecord,
+ SkillVersionDocumentRecord,
+ SkillVersionRecord,
+ ValidationWarning,
+} from "./types";
+
// ID generator: 12-char lowercase alphanumeric (URL- and path-safe).
const nanoid = customAlphabet("abcdefghijklmnopqrstuvwxyz0123456789", 12);
// Single JSON file that indexes every record in the library data dir.
const MANIFEST_FILE = "manifest.json";

/** Current timestamp in ISO-8601 (UTC) form, used for all record timestamps. */
function nowIso(): string {
  return new Date().toISOString();
}
+
/** Fresh, empty top-level manifest (schema version 1) with every record table empty. */
function createEmptyManifest(): LibraryManifest {
  return {
    version: 1,
    libraries: [],
    packs: [],
    skills: [],
    documents: [],
    versions: [],
    packVersions: [],
    packVersionDocuments: [],
    skillVersions: [],
    skillVersionDocuments: [],
    branches: [],
    merges: [],
    conflicts: [],
  };
}
+
+async function ensureDir(dir: string): Promise {
+ await fs.mkdir(dir, { recursive: true });
+}
+
+async function readJsonFile(filePath: string, fallback: T): Promise {
+ try {
+ const raw = await fs.readFile(filePath, "utf8");
+ return JSON.parse(raw) as T;
+ } catch {
+ return fallback;
+ }
+}
+
+async function writeJsonFile(filePath: string, value: unknown): Promise {
+ await ensureDir(path.dirname(filePath));
+ await fs.writeFile(filePath, JSON.stringify(value, null, 2), "utf8");
+}
+
/** Thrown when an optimistic-concurrency previousVersionId no longer matches the document head. */
export class StaleVersionError extends Error {
  constructor(message: string) {
    super(message);
    this.name = "StaleVersionError";
  }
}

/** Thrown when a referenced record (library, pack, document, skill, version) does not exist. */
export class NotFoundError extends Error {
  constructor(message: string) {
    super(message);
    this.name = "NotFoundError";
  }
}

/** Thrown when publish-time validation fails; carries the individual warnings. */
export class ValidationError extends Error {
  constructor(message: string, public readonly warnings: ValidationWarning[]) {
    super(message);
    this.name = "ValidationError";
  }
}
+
export class LibraryStore {
  // Root data directory, resolved once from config when the store is constructed.
  private readonly dataDir = getLibraryConfig().dataDir;
  // Filesystem-backed object storage rooted at dataDir.
  // NOTE(review): field initializer order matters — dataDir must stay declared first.
  private readonly objects: ObjectStorage = new FilesystemObjectStorage(this.dataDir);

  /** Expose the underlying object storage to callers outside the store. */
  getObjectStorage(): ObjectStorage {
    return this.objects;
  }

  /** Absolute path of the single JSON manifest that indexes all records. */
  private manifestPath(): string {
    return path.join(this.dataDir, MANIFEST_FILE);
  }
+
+ async readManifest(): Promise {
+ await ensureDir(this.dataDir);
+ return readJsonFile(this.manifestPath(), createEmptyManifest());
+ }
+
+ async writeManifest(manifest: LibraryManifest): Promise {
+ await writeJsonFile(this.manifestPath(), manifest);
+ }
+
+ // ── Library bootstrap ─────────────────────────────────────────────────
+
  /**
   * Ensure the workspace-scoped library — and, when ownerUserId is given, the
   * caller's user-scoped library — exist for a workspace, creating them lazily.
   * The manifest is written only when something was actually created.
   */
  async ensureLibraries(workspaceId: string, ownerUserId: string | null): Promise<{ workspace: LibraryRecord; user: LibraryRecord | null }> {
    const manifest = await this.readManifest();
    let workspace = manifest.libraries.find(
      (l) => l.workspaceId === workspaceId && l.scope === "workspace" && l.deletedAt === null,
    );
    let user = ownerUserId
      ? manifest.libraries.find(
          (l) => l.workspaceId === workspaceId && l.scope === "user" && l.ownerUserId === ownerUserId && l.deletedAt === null,
        )
      : null;

    let mutated = false;
    if (!workspace) {
      const now = nowIso();
      workspace = {
        id: nanoid(),
        workspaceId,
        scope: "workspace",
        ownerUserId: null,
        createdAt: now,
        updatedAt: now,
        deletedAt: null,
      };
      manifest.libraries.push(workspace);
      mutated = true;
    }
    if (ownerUserId && !user) {
      const now = nowIso();
      user = {
        id: nanoid(),
        workspaceId,
        scope: "user",
        ownerUserId,
        createdAt: now,
        updatedAt: now,
        deletedAt: null,
      };
      manifest.libraries.push(user);
      mutated = true;
    }

    if (mutated) await this.writeManifest(manifest);
    // Non-null assertion is safe: workspace is created above whenever it was missing.
    return { workspace: workspace!, user: user ?? null };
  }
+
+ async listLibraries(workspaceId: string): Promise {
+ const manifest = await this.readManifest();
+ return manifest.libraries.filter((l) => l.workspaceId === workspaceId && l.deletedAt === null);
+ }
+
+ // ── Pack CRUD ─────────────────────────────────────────────────────────
+
+ async createPack(libraryId: string, input: CreatePackInput): Promise {
+ const manifest = await this.readManifest();
+ const library = manifest.libraries.find((l) => l.id === libraryId);
+ if (!library) throw new NotFoundError(`Library ${libraryId} not found`);
+
+ const dup = manifest.packs.find(
+ (p) => p.libraryId === libraryId && p.packKey === input.packKey && p.deletedAt === null,
+ );
+ if (dup) throw new Error(`Pack with key "${input.packKey}" already exists in library`);
+
+ const now = nowIso();
+ const branchId = nanoid();
+ const pack: PackRecord = {
+ id: nanoid(),
+ libraryId,
+ packKey: input.packKey,
+ name: input.name,
+ description: input.description ?? "",
+ tags: input.tags ?? [],
+ basePackId: null,
+ external: false,
+ currentBranchId: branchId,
+ createdBy: input.createdBy ?? "",
+ createdAt: now,
+ updatedAt: now,
+ deletedAt: null,
+ metadata: input.metadata,
+ };
+ const branch: BranchRecord = {
+ id: branchId,
+ packId: pack.id,
+ name: "main",
+ baseVersionByDocId: {},
+ headVersionByDocId: {},
+ createdAt: now,
+ updatedAt: now,
+ };
+ manifest.packs.push(pack);
+ manifest.branches.push(branch);
+ await this.writeManifest(manifest);
+ return pack;
+ }
+
+ async listPacks(libraryId: string, options?: { includeDeleted?: boolean }): Promise {
+ const manifest = await this.readManifest();
+ return manifest.packs.filter(
+ (p) => p.libraryId === libraryId && (options?.includeDeleted || p.deletedAt === null),
+ );
+ }
+
+ async listAllPacks(workspaceId: string, options?: { includeDeleted?: boolean }): Promise {
+ const manifest = await this.readManifest();
+ const libIds = new Set(
+ manifest.libraries
+ .filter((l) => l.workspaceId === workspaceId && l.deletedAt === null)
+ .map((l) => l.id),
+ );
+ return manifest.packs.filter(
+ (p) => libIds.has(p.libraryId) && (options?.includeDeleted || p.deletedAt === null),
+ );
+ }
+
+ async getPack(packId: string): Promise {
+ const manifest = await this.readManifest();
+ return manifest.packs.find((p) => p.id === packId) ?? null;
+ }
+
+ async renamePack(packId: string, input: { name?: string; description?: string; tags?: string[] }): Promise {
+ const manifest = await this.readManifest();
+ const pack = manifest.packs.find((p) => p.id === packId);
+ if (!pack) throw new NotFoundError(`Pack ${packId} not found`);
+ if (input.name !== undefined) pack.name = input.name;
+ if (input.description !== undefined) pack.description = input.description;
+ if (input.tags !== undefined) pack.tags = input.tags;
+ pack.updatedAt = nowIso();
+ await this.writeManifest(manifest);
+ return pack;
+ }
+
+ async movePack(packId: string, targetLibraryId: string): Promise {
+ const manifest = await this.readManifest();
+ const pack = manifest.packs.find((p) => p.id === packId);
+ if (!pack) throw new NotFoundError(`Pack ${packId} not found`);
+ const targetLib = manifest.libraries.find((l) => l.id === targetLibraryId);
+ if (!targetLib) throw new NotFoundError(`Library ${targetLibraryId} not found`);
+ pack.libraryId = targetLibraryId;
+ pack.updatedAt = nowIso();
+ await this.writeManifest(manifest);
+ return pack;
+ }
+
+ async softDeletePack(packId: string): Promise {
+ const manifest = await this.readManifest();
+ const pack = manifest.packs.find((p) => p.id === packId);
+ if (!pack) throw new NotFoundError(`Pack ${packId} not found`);
+ pack.deletedAt = nowIso();
+ pack.updatedAt = pack.deletedAt;
+ await this.writeManifest(manifest);
+ }
+
+ async restorePack(packId: string): Promise {
+ const manifest = await this.readManifest();
+ const pack = manifest.packs.find((p) => p.id === packId);
+ if (!pack) throw new NotFoundError(`Pack ${packId} not found`);
+ pack.deletedAt = null;
+ pack.updatedAt = nowIso();
+ await this.writeManifest(manifest);
+ return pack;
+ }
+
+ async searchPacks(libraryId: string, query: string): Promise {
+ const manifest = await this.readManifest();
+ const q = query.trim().toLowerCase();
+ if (!q) return manifest.packs.filter((p) => p.libraryId === libraryId && p.deletedAt === null);
+ return manifest.packs.filter((p) => {
+ if (p.libraryId !== libraryId || p.deletedAt !== null) return false;
+ if (p.name.toLowerCase().includes(q)) return true;
+ if (p.description.toLowerCase().includes(q)) return true;
+ if (p.tags.some((t) => t.toLowerCase().includes(q))) return true;
+ const skills = manifest.skills.filter((s) => s.packId === p.id && s.deletedAt === null);
+ if (skills.some((s) => s.name.toLowerCase().includes(q) || s.description.toLowerCase().includes(q))) {
+ return true;
+ }
+ return false;
+ });
+ }
+
+ // ── Fork ──────────────────────────────────────────────────────────────
+
+ async forkPack(sourcePackId: string, targetLibraryId: string, options?: { packKey?: string; createdBy?: string }): Promise {
+ const manifest = await this.readManifest();
+ const source = manifest.packs.find((p) => p.id === sourcePackId);
+ if (!source) throw new NotFoundError(`Source pack ${sourcePackId} not found`);
+
+ const targetLib = manifest.libraries.find((l) => l.id === targetLibraryId);
+ if (!targetLib) throw new NotFoundError(`Target library ${targetLibraryId} not found`);
+
+ const desiredKey = options?.packKey ?? source.packKey;
+ let key = desiredKey;
+ let suffix = 1;
+ while (manifest.packs.some((p) => p.libraryId === targetLibraryId && p.packKey === key && p.deletedAt === null)) {
+ key = `${desiredKey}-fork-${suffix++}`;
+ }
+
+ const now = nowIso();
+ const branchId = nanoid();
+ const fork: PackRecord = {
+ id: nanoid(),
+ libraryId: targetLibraryId,
+ packKey: key,
+ name: source.name,
+ description: source.description,
+ tags: [...source.tags],
+ basePackId: source.id,
+ external: source.external,
+ currentBranchId: branchId,
+ createdBy: options?.createdBy ?? "",
+ createdAt: now,
+ updatedAt: now,
+ deletedAt: null,
+ metadata: source.metadata ? { ...source.metadata } : undefined,
+ };
+ manifest.packs.push(fork);
+
+ const sourceDocs = manifest.documents.filter((d) => d.packId === source.id && d.deletedAt === null);
+ const docIdMap = new Map();
+ const headByDoc: Record = {};
+ const baseByDoc: Record = {};
+
+ for (const sourceDoc of sourceDocs) {
+ const newDocId = nanoid();
+ docIdMap.set(sourceDoc.id, newDocId);
+
+ const sourceContent = await this.readDocumentContent(sourceDoc.id, sourceDoc.currentVersionId);
+ const versionId = nanoid();
+ const contentKey = OBJECT_KEYS.documentVersionContent(newDocId, versionId);
+ const metadataKey = OBJECT_KEYS.documentVersionMetadata(newDocId, versionId);
+ const contentHash = computeContentHash(sourceContent ?? "");
+ const byteLength = Buffer.byteLength(sourceContent ?? "", "utf8");
+ await this.objects.putObject(contentKey, sourceContent ?? "", { immutable: true });
+ await this.objects.putObject(
+ metadataKey,
+ JSON.stringify({ contentHash, byteLength, createdAt: now, message: "fork" }, null, 2),
+ { immutable: true },
+ );
+
+ const newDoc: LibraryDocumentRecord = {
+ id: newDocId,
+ packId: fork.id,
+ role: sourceDoc.role,
+ path: sourceDoc.path,
+ currentVersionId: versionId,
+ createdBy: options?.createdBy ?? "",
+ createdAt: now,
+ updatedAt: now,
+ deletedAt: null,
+ };
+ manifest.documents.push(newDoc);
+ const newVersion: LibraryDocumentVersionRecord = {
+ id: versionId,
+ docId: newDocId,
+ packId: fork.id,
+ parentVersionId: null,
+ contentKey,
+ contentHash,
+ byteLength,
+ message: "fork",
+ createdBy: options?.createdBy ?? "",
+ createdAt: now,
+ };
+ manifest.versions.push(newVersion);
+ headByDoc[newDocId] = versionId;
+ baseByDoc[newDocId] = sourceDoc.currentVersionId;
+ }
+
+ const branch: BranchRecord = {
+ id: branchId,
+ packId: fork.id,
+ name: "main",
+ baseVersionByDocId: baseByDoc,
+ headVersionByDocId: headByDoc,
+ createdAt: now,
+ updatedAt: now,
+ };
+ manifest.branches.push(branch);
+
+ const sourceSkills = manifest.skills.filter((s) => s.packId === source.id && s.deletedAt === null);
+ for (const sourceSkill of sourceSkills) {
+ const mappedDocId = docIdMap.get(sourceSkill.entrypointDocId);
+ if (!mappedDocId) continue;
+ const newSkill: SkillRecord = {
+ id: nanoid(),
+ packId: fork.id,
+ skillKey: sourceSkill.skillKey,
+ name: sourceSkill.name,
+ description: sourceSkill.description,
+ entrypointDocId: mappedDocId,
+ createdBy: options?.createdBy ?? "",
+ createdAt: now,
+ updatedAt: now,
+ deletedAt: null,
+ deprecated: sourceSkill.deprecated,
+ metadata: sourceSkill.metadata ? { ...sourceSkill.metadata } : undefined,
+ };
+ manifest.skills.push(newSkill);
+ }
+
+ await this.writeManifest(manifest);
+ return fork;
+ }
+
+ // ── Documents ─────────────────────────────────────────────────────────
+
  /**
   * Create a document in a pack, writing its first immutable version to object
   * storage and registering the new head on the pack's current branch.
   * @throws NotFoundError when the pack does not exist.
   */
  async createDocument(packId: string, input: CreateDocumentInput): Promise<{ document: LibraryDocumentRecord; version: LibraryDocumentVersionRecord }> {
    const manifest = await this.readManifest();
    const pack = manifest.packs.find((p) => p.id === packId);
    if (!pack) throw new NotFoundError(`Pack ${packId} not found`);

    const branch = manifest.branches.find((b) => b.id === pack.currentBranchId);
    if (!branch) throw new Error(`Branch ${pack.currentBranchId} not found for pack ${packId}`);

    const now = nowIso();
    const docId = nanoid();
    const versionId = nanoid();
    const contentKey = OBJECT_KEYS.documentVersionContent(docId, versionId);
    const metadataKey = OBJECT_KEYS.documentVersionMetadata(docId, versionId);
    const contentHash = computeContentHash(input.content);
    const byteLength = Buffer.byteLength(input.content, "utf8");

    // Content and its sidecar metadata are both written as immutable objects.
    await this.objects.putObject(contentKey, input.content, { immutable: true });
    await this.objects.putObject(
      metadataKey,
      JSON.stringify({ contentHash, byteLength, createdAt: now, message: input.message ?? "create" }, null, 2),
      { immutable: true },
    );

    const document: LibraryDocumentRecord = {
      id: docId,
      packId,
      role: input.role,
      path: input.path,
      currentVersionId: versionId,
      createdBy: input.createdBy ?? "",
      createdAt: now,
      updatedAt: now,
      deletedAt: null,
      metadata: input.metadata,
    };
    const version: LibraryDocumentVersionRecord = {
      id: versionId,
      docId,
      packId,
      parentVersionId: null,
      contentKey,
      contentHash,
      byteLength,
      message: input.message ?? "create",
      createdBy: input.createdBy ?? "",
      createdAt: now,
      metadata: input.metadata,
    };
    manifest.documents.push(document);
    manifest.versions.push(version);
    branch.headVersionByDocId[docId] = versionId;
    branch.updatedAt = now;
    pack.updatedAt = now;
    await this.writeManifest(manifest);
    return { document, version };
  }
+
+ async listDocuments(packId: string, options?: { includeDeleted?: boolean }): Promise {
+ const manifest = await this.readManifest();
+ return manifest.documents.filter(
+ (d) => d.packId === packId && (options?.includeDeleted || d.deletedAt === null),
+ );
+ }
+
+ async listVersions(docId: string): Promise {
+ const manifest = await this.readManifest();
+ return manifest.versions
+ .filter((v) => v.docId === docId)
+ .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
+ }
+
+ async readDocumentContent(docId: string, versionId: string): Promise {
+ const manifest = await this.readManifest();
+ const version = manifest.versions.find((v) => v.id === versionId && v.docId === docId);
+ if (!version) return null;
+ return this.objects.getObjectAsString(version.contentKey);
+ }
+
+ async saveDocumentVersion(docId: string, input: SaveDocumentVersionInput): Promise {
+ const manifest = await this.readManifest();
+ const document = manifest.documents.find((d) => d.id === docId);
+ if (!document) throw new NotFoundError(`Document ${docId} not found`);
+ if (document.currentVersionId !== input.previousVersionId) {
+ throw new StaleVersionError(
+ `Stale previousVersionId. Expected ${document.currentVersionId}, got ${input.previousVersionId}`,
+ );
+ }
+
+ const now = nowIso();
+ const versionId = nanoid();
+ const contentKey = OBJECT_KEYS.documentVersionContent(docId, versionId);
+ const metadataKey = OBJECT_KEYS.documentVersionMetadata(docId, versionId);
+ const contentHash = computeContentHash(input.content);
+ const byteLength = Buffer.byteLength(input.content, "utf8");
+
+ await this.objects.putObject(contentKey, input.content, { immutable: true });
+ await this.objects.putObject(
+ metadataKey,
+ JSON.stringify({ contentHash, byteLength, createdAt: now, message: input.message ?? "save" }, null, 2),
+ { immutable: true },
+ );
+
+ const version: LibraryDocumentVersionRecord = {
+ id: versionId,
+ docId,
+ packId: document.packId,
+ parentVersionId: document.currentVersionId,
+ contentKey,
+ contentHash,
+ byteLength,
+ message: input.message ?? "save",
+ createdBy: input.createdBy ?? "",
+ createdAt: now,
+ metadata: input.metadata,
+ };
+
+ document.currentVersionId = versionId;
+ document.updatedAt = now;
+ manifest.versions.push(version);
+
+ const pack = manifest.packs.find((p) => p.id === document.packId);
+ if (pack) {
+ const branch = manifest.branches.find((b) => b.id === pack.currentBranchId);
+ if (branch) {
+ branch.headVersionByDocId[docId] = versionId;
+ branch.updatedAt = now;
+ }
+ pack.updatedAt = now;
+ }
+
+ await this.writeManifest(manifest);
+ return version;
+ }
+
+ async renameDocument(docId: string, newPath: string): Promise {
+ const manifest = await this.readManifest();
+ const document = manifest.documents.find((d) => d.id === docId);
+ if (!document) throw new NotFoundError(`Document ${docId} not found`);
+ document.path = newPath;
+ document.updatedAt = nowIso();
+ await this.writeManifest(manifest);
+ return document;
+ }
+
+ async moveDocument(docId: string, newPath: string): Promise {
+ return this.renameDocument(docId, newPath);
+ }
+
+ async softDeleteDocument(docId: string): Promise {
+ const manifest = await this.readManifest();
+ const document = manifest.documents.find((d) => d.id === docId);
+ if (!document) throw new NotFoundError(`Document ${docId} not found`);
+ document.deletedAt = nowIso();
+ document.updatedAt = document.deletedAt;
+ await this.writeManifest(manifest);
+ }
+
+ async restoreDocument(docId: string): Promise {
+ const manifest = await this.readManifest();
+ const document = manifest.documents.find((d) => d.id === docId);
+ if (!document) throw new NotFoundError(`Document ${docId} not found`);
+ document.deletedAt = null;
+ document.updatedAt = nowIso();
+ await this.writeManifest(manifest);
+ return document;
+ }
+
+ // ── Skills ────────────────────────────────────────────────────────────
+
+ async createSkill(packId: string, input: CreateSkillInput): Promise {
+ const manifest = await this.readManifest();
+ const pack = manifest.packs.find((p) => p.id === packId);
+ if (!pack) throw new NotFoundError(`Pack ${packId} not found`);
+
+ const dup = manifest.skills.find(
+ (s) => s.packId === packId && s.skillKey === input.skillKey && s.deletedAt === null,
+ );
+ if (dup) throw new Error(`Skill key "${input.skillKey}" already exists in pack`);
+
+ const document = manifest.documents.find((d) => d.id === input.entrypointDocId);
+ if (!document) throw new NotFoundError(`Entrypoint document ${input.entrypointDocId} not found`);
+
+ const now = nowIso();
+ const skill: SkillRecord = {
+ id: nanoid(),
+ packId,
+ skillKey: input.skillKey,
+ name: input.name,
+ description: input.description,
+ entrypointDocId: input.entrypointDocId,
+ createdBy: input.createdBy ?? "",
+ createdAt: now,
+ updatedAt: now,
+ deletedAt: null,
+ deprecated: false,
+ metadata: input.metadata,
+ };
+ manifest.skills.push(skill);
+ pack.updatedAt = now;
+ await this.writeManifest(manifest);
+ return skill;
+ }
+
+ async listSkills(packId: string): Promise {
+ const manifest = await this.readManifest();
+ return manifest.skills.filter((s) => s.packId === packId && s.deletedAt === null);
+ }
+
+ async updateSkill(skillId: string, patch: Partial>): Promise {
+ const manifest = await this.readManifest();
+ const skill = manifest.skills.find((s) => s.id === skillId);
+ if (!skill) throw new NotFoundError(`Skill ${skillId} not found`);
+ Object.assign(skill, patch);
+ skill.updatedAt = nowIso();
+ await this.writeManifest(manifest);
+ return skill;
+ }
+
+ async softDeleteSkill(skillId: string): Promise {
+ const manifest = await this.readManifest();
+ const skill = manifest.skills.find((s) => s.id === skillId);
+ if (!skill) throw new NotFoundError(`Skill ${skillId} not found`);
+ skill.deletedAt = nowIso();
+ skill.updatedAt = skill.deletedAt;
+ await this.writeManifest(manifest);
+ }
+
+ // ── Validation helper ────────────────────────────────────────────────
+
+ async validatePackById(packId: string): Promise {
+ const manifest = await this.readManifest();
+ const pack = manifest.packs.find((p) => p.id === packId);
+ if (!pack) throw new NotFoundError(`Pack ${packId} not found`);
+ const skills = manifest.skills.filter((s) => s.packId === packId);
+ const documents = manifest.documents.filter((d) => d.packId === packId);
+ const documentContents = new Map();
+ for (const d of documents.filter((doc) => doc.deletedAt === null)) {
+ const content = await this.readDocumentContent(d.id, d.currentVersionId);
+ documentContents.set(d.id, content ?? "");
+ }
+ const unresolved = manifest.merges
+ .filter((m) => m.packId === packId && m.status === "conflict")
+ .map((m) => m.id);
+ return validatePack({ pack, skills, documents, documentContents, unresolvedMergeIds: unresolved });
+ }
+
+ // ── Publish ───────────────────────────────────────────────────────────
+
+ async publishPackVersion(packId: string, input: PublishPackVersionInput): Promise {
+ const warnings = await this.validatePackById(packId);
+ const errors = warnings.filter((w) => w.level === "error");
+ if (errors.length > 0) {
+ throw new ValidationError(`Pack has ${errors.length} validation error(s)`, errors);
+ }
+
+ const manifest = await this.readManifest();
+ const pack = manifest.packs.find((p) => p.id === packId);
+ if (!pack) throw new NotFoundError(`Pack ${packId} not found`);
+ const library = manifest.libraries.find((l) => l.id === pack.libraryId);
+ if (!library) throw new NotFoundError(`Library ${pack.libraryId} not found`);
+ const dup = manifest.packVersions.find((pv) => pv.packId === packId && pv.version === input.version);
+ if (dup) throw new Error(`Pack version ${input.version} already exists`);
+
+ const skills = manifest.skills.filter((s) => s.packId === packId && s.deletedAt === null);
+ const documents = manifest.documents.filter((d) => d.packId === packId && d.deletedAt === null);
+
+ const now = nowIso();
+ const versionId = nanoid();
+ const manifestObj = buildManifest({ pack, skills, documents, scope: library.scope, version: input.version });
+ const manifestJson = JSON.stringify(manifestObj, null, 2);
+ const manifestKey = OBJECT_KEYS.packVersionManifest(packId, versionId);
+ const manifestHash = computeContentHash(manifestJson);
+ await this.objects.putObject(manifestKey, manifestJson, { immutable: true });
+
+ const packVersion: PackVersionRecord = {
+ id: versionId,
+ packId,
+ version: input.version,
+ manifestKey,
+ manifestHash,
+ createdBy: input.createdBy ?? "",
+ createdAt: now,
+ deprecated: false,
+ deletedAt: null,
+ notes: input.notes ?? "",
+ };
+ manifest.packVersions.push(packVersion);
+
+ for (const doc of documents) {
+ const record: PackVersionDocumentRecord = {
+ id: nanoid(),
+ packVersionId: versionId,
+ packId,
+ docId: doc.id,
+ versionId: doc.currentVersionId,
+ role: doc.role,
+ path: doc.path,
+ contentHash: manifest.versions.find((v) => v.id === doc.currentVersionId)?.contentHash ?? "",
+ };
+ manifest.packVersionDocuments.push(record);
+ }
+
+ pack.updatedAt = now;
+ await this.writeManifest(manifest);
+ return packVersion;
+ }
+
+ async listPackVersions(packId: string): Promise {
+ const manifest = await this.readManifest();
+ return manifest.packVersions
+ .filter((pv) => pv.packId === packId)
+ .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
+ }
+
+ async deprecatePackVersion(packVersionId: string, deprecated: boolean): Promise<PackVersionRecord> {
+ const manifest = await this.readManifest();
+ const pv = manifest.packVersions.find((p) => p.id === packVersionId);
+ if (!pv) throw new NotFoundError(`Pack version ${packVersionId} not found`);
+ pv.deprecated = deprecated;
+ await this.writeManifest(manifest);
+ return pv;
+ }
+
+ async publishSkillVersion(skillId: string, input: PublishSkillVersionInput): Promise<SkillVersionRecord> {
+ const manifest = await this.readManifest();
+ const skill = manifest.skills.find((s) => s.id === skillId);
+ if (!skill) throw new NotFoundError(`Skill ${skillId} not found`);
+ const dup = manifest.skillVersions.find((sv) => sv.skillId === skillId && sv.version === input.version);
+ if (dup) throw new Error(`Skill version ${input.version} already exists`);
+
+ const now = nowIso();
+ const versionId = nanoid();
+ let packVersionId: string | null = null;
+ if (input.linkToLatestPackVersion) {
+ const latest = manifest.packVersions
+ .filter((pv) => pv.packId === skill.packId && pv.deletedAt === null)
+ .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime())[0];
+ packVersionId = latest?.id ?? null;
+ }
+
+ const skillVersion: SkillVersionRecord = {
+ id: versionId,
+ skillId,
+ packId: skill.packId,
+ version: input.version,
+ packVersionId,
+ createdBy: input.createdBy ?? "",
+ createdAt: now,
+ deprecated: false,
+ deletedAt: null,
+ notes: input.notes ?? "",
+ };
+ manifest.skillVersions.push(skillVersion);
+
+ const entrypoint = manifest.documents.find((d) => d.id === skill.entrypointDocId);
+ if (entrypoint) {
+ const docRecord: SkillVersionDocumentRecord = {
+ id: nanoid(),
+ skillVersionId: versionId,
+ skillId,
+ docId: entrypoint.id,
+ versionId: entrypoint.currentVersionId,
+ contentHash: manifest.versions.find((v) => v.id === entrypoint.currentVersionId)?.contentHash ?? "",
+ };
+ manifest.skillVersionDocuments.push(docRecord);
+ }
+ const refs = manifest.documents.filter(
+ (d) => d.packId === skill.packId && d.role === "reference" && d.deletedAt === null,
+ );
+ for (const r of refs) {
+ manifest.skillVersionDocuments.push({
+ id: nanoid(),
+ skillVersionId: versionId,
+ skillId,
+ docId: r.id,
+ versionId: r.currentVersionId,
+ contentHash: manifest.versions.find((v) => v.id === r.currentVersionId)?.contentHash ?? "",
+ });
+ }
+
+ skill.updatedAt = now;
+ await this.writeManifest(manifest);
+ return skillVersion;
+ }
+
+ async listSkillVersions(skillId: string): Promise<SkillVersionRecord[]> {
+ const manifest = await this.readManifest();
+ return manifest.skillVersions
+ .filter((sv) => sv.skillId === skillId)
+ .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
+ }
+
+ // ── Resolve ──────────────────────────────────────────────────────────
+
+ async resolveLive(input: ResolveLiveInput): Promise<SkillBundle | null> {
+ const manifest = await this.readManifest();
+ const pack = manifest.packs.find((p) => p.id === input.packId);
+ if (!pack) return null;
+ const library = manifest.libraries.find((l) => l.id === pack.libraryId);
+ if (!library || library.scope !== input.scope) return null;
+ const skill = manifest.skills.find((s) => s.id === input.skillId && s.packId === pack.id);
+ if (!skill) return null;
+
+ if (input.packVersion === "draft") {
+ const entrypoint = manifest.documents.find((d) => d.id === skill.entrypointDocId);
+ if (!entrypoint) return null;
+ const refs = manifest.documents.filter(
+ (d) => d.packId === pack.id && d.role === "reference" && d.deletedAt === null,
+ );
+ const entrypointContent = (await this.readDocumentContent(entrypoint.id, entrypoint.currentVersionId)) ?? "";
+ const entryDoc: SkillBundleDocument = {
+ docId: entrypoint.id,
+ path: entrypoint.path,
+ role: entrypoint.role,
+ content: entrypointContent,
+ contentHash: computeContentHash(entrypointContent),
+ };
+ const refDocs: SkillBundleDocument[] = [];
+ for (const r of refs) {
+ const c = (await this.readDocumentContent(r.id, r.currentVersionId)) ?? "";
+ refDocs.push({
+ docId: r.id,
+ path: r.path,
+ role: r.role,
+ content: c,
+ contentHash: computeContentHash(c),
+ });
+ }
+ return {
+ scope: library.scope,
+ packId: pack.id,
+ packKey: pack.packKey,
+ packVersion: "draft",
+ skillId: skill.id,
+ skillKey: skill.skillKey,
+ skillName: skill.name,
+ description: skill.description,
+ entrypoint: entryDoc,
+ documents: refDocs,
+ manifestHash: "",
+ };
+ }
+
+ const packVersion = manifest.packVersions.find(
+ (pv) => pv.packId === pack.id && pv.version === input.packVersion,
+ );
+ if (!packVersion) return null;
+ const versionDocs = manifest.packVersionDocuments.filter((pvd) => pvd.packVersionId === packVersion.id);
+ const entryRef = versionDocs.find((vd) => vd.docId === skill.entrypointDocId);
+ if (!entryRef) return null;
+ const entryContent = (await this.readDocumentContent(entryRef.docId, entryRef.versionId)) ?? "";
+ const entryDoc: SkillBundleDocument = {
+ docId: entryRef.docId,
+ path: entryRef.path,
+ role: entryRef.role,
+ content: entryContent,
+ contentHash: entryRef.contentHash || computeContentHash(entryContent),
+ };
+ const refDocs: SkillBundleDocument[] = [];
+ for (const vd of versionDocs.filter((d) => d.role === "reference" && d.docId !== entryRef.docId)) {
+ const c = (await this.readDocumentContent(vd.docId, vd.versionId)) ?? "";
+ refDocs.push({
+ docId: vd.docId,
+ path: vd.path,
+ role: vd.role,
+ content: c,
+ contentHash: vd.contentHash || computeContentHash(c),
+ });
+ }
+ return {
+ scope: library.scope,
+ packId: pack.id,
+ packKey: pack.packKey,
+ packVersion: packVersion.version,
+ skillId: skill.id,
+ skillKey: skill.skillKey,
+ skillName: skill.name,
+ description: skill.description,
+ entrypoint: entryDoc,
+ documents: refDocs,
+ manifestHash: packVersion.manifestHash,
+ };
+ }
+
+ // ── Branch / merge ───────────────────────────────────────────────────
+
+ async getForkState(packId: string): Promise<{ behind: boolean; conflict: boolean; basePackId: string | null }> {
+ const manifest = await this.readManifest();
+ const pack = manifest.packs.find((p) => p.id === packId);
+ if (!pack || !pack.basePackId) return { behind: false, conflict: false, basePackId: null };
+ const branch = manifest.branches.find((b) => b.id === pack.currentBranchId);
+ if (!branch) return { behind: false, conflict: false, basePackId: pack.basePackId };
+ const baseDocs = manifest.documents.filter((d) => d.packId === pack.basePackId && d.deletedAt === null);
+
+ const conflict = manifest.merges.some(
+ (m) => m.packId === packId && m.status === "conflict",
+ );
+ let behind = false;
+ for (const baseDoc of baseDocs) {
+ const ourDoc = manifest.documents.find((d) => d.packId === packId && d.path === baseDoc.path && d.deletedAt === null);
+ if (!ourDoc) {
+ behind = true;
+ break;
+ }
+ const baseFromBranch = branch.baseVersionByDocId[ourDoc.id];
+ if (baseFromBranch && baseFromBranch !== baseDoc.currentVersionId) {
+ behind = true;
+ break;
+ }
+ }
+ return { behind, conflict, basePackId: pack.basePackId };
+ }
+
+ async mergeBaseIntoBranch(packId: string, options?: { initiatedBy?: string }): Promise<MergeRecord> {
+ const manifest = await this.readManifest();
+ const pack = manifest.packs.find((p) => p.id === packId);
+ if (!pack) throw new NotFoundError(`Pack ${packId} not found`);
+ if (!pack.basePackId) throw new Error("Pack is not a fork");
+ const branch = manifest.branches.find((b) => b.id === pack.currentBranchId);
+ if (!branch) throw new Error(`Branch ${pack.currentBranchId} not found`);
+
+ const now = nowIso();
+ const mergeId = nanoid();
+ const cleanlyMerged: string[] = [];
+ const conflictDocs: string[] = [];
+ const conflictRecords: ConflictRecord[] = [];
+
+ const baseDocs = manifest.documents.filter(
+ (d) => d.packId === pack.basePackId && d.deletedAt === null,
+ );
+
+ for (const baseDoc of baseDocs) {
+ const ourDoc = manifest.documents.find(
+ (d) => d.packId === packId && d.path === baseDoc.path && d.deletedAt === null,
+ );
+ if (!ourDoc) continue;
+ const ancestorVersionId = branch.baseVersionByDocId[ourDoc.id];
+ if (ancestorVersionId === baseDoc.currentVersionId) {
+ cleanlyMerged.push(ourDoc.id);
+ continue;
+ }
+ const ancestor = ancestorVersionId
+ ? (await this.readDocumentContent(ourDoc.id, ancestorVersionId))
+ ?? (await this.readDocumentContent(baseDoc.id, ancestorVersionId))
+ ?? ""
+ : "";
+ const theirs = (await this.readDocumentContent(baseDoc.id, baseDoc.currentVersionId)) ?? "";
+ const yours = (await this.readDocumentContent(ourDoc.id, ourDoc.currentVersionId)) ?? "";
+
+ const merged = threeWayTextMerge(ancestor, theirs, yours);
+ if (merged.cleanlyMerged) {
+ const versionId = nanoid();
+ const contentKey = OBJECT_KEYS.documentVersionContent(ourDoc.id, versionId);
+ const metadataKey = OBJECT_KEYS.documentVersionMetadata(ourDoc.id, versionId);
+ const contentHash = computeContentHash(merged.content);
+ const byteLength = Buffer.byteLength(merged.content, "utf8");
+ await this.objects.putObject(contentKey, merged.content, { immutable: true });
+ await this.objects.putObject(
+ metadataKey,
+ JSON.stringify({ contentHash, byteLength, createdAt: now, message: "merge-base" }, null, 2),
+ { immutable: true },
+ );
+ const newVersion: LibraryDocumentVersionRecord = {
+ id: versionId,
+ docId: ourDoc.id,
+ packId,
+ parentVersionId: ourDoc.currentVersionId,
+ contentKey,
+ contentHash,
+ byteLength,
+ message: "merge-base",
+ createdBy: options?.initiatedBy ?? "",
+ createdAt: now,
+ };
+ manifest.versions.push(newVersion);
+ ourDoc.currentVersionId = versionId;
+ ourDoc.updatedAt = now;
+ branch.headVersionByDocId[ourDoc.id] = versionId;
+ branch.baseVersionByDocId[ourDoc.id] = baseDoc.currentVersionId;
+ cleanlyMerged.push(ourDoc.id);
+ } else {
+ conflictDocs.push(ourDoc.id);
+ for (const conflict of merged.conflicts) {
+ conflictRecords.push({
+ id: nanoid(),
+ mergeId,
+ packId,
+ docId: ourDoc.id,
+ conflictType: conflict.conflictType,
+ ancestorContent: conflict.ancestor,
+ baseContent: conflict.base,
+ branchContent: conflict.branch,
+ resolved: false,
+ resolvedAt: null,
+ resolvedBy: null,
+ resolutionContent: null,
+ });
+ }
+ }
+ }
+
+ const status: MergeRecord["status"] = conflictDocs.length === 0 ? "clean" : "conflict";
+ const merge: MergeRecord = {
+ id: mergeId,
+ packId,
+ branchId: branch.id,
+ basePackVersionId: null,
+ initiatedBy: options?.initiatedBy ?? "",
+ initiatedAt: now,
+ status,
+ mergedCleanlyDocs: cleanlyMerged,
+ conflictDocs,
+ completedAt: status === "clean" ? now : null,
+ };
+ manifest.merges.push(merge);
+ manifest.conflicts.push(...conflictRecords);
+ branch.updatedAt = now;
+ pack.updatedAt = now;
+ await this.writeManifest(manifest);
+ return merge;
+ }
+
+ async listConflicts(mergeId: string): Promise<ConflictRecord[]> {
+ const manifest = await this.readManifest();
+ return manifest.conflicts.filter((c) => c.mergeId === mergeId);
+ }
+
+ async resolveMergeConflict(
+ mergeId: string,
+ input: { resolvedContentByDocId: Record<string, string>; resolvedBy?: string },
+ ): Promise<MergeRecord> {
+ const manifest = await this.readManifest();
+ const merge = manifest.merges.find((m) => m.id === mergeId);
+ if (!merge) throw new NotFoundError(`Merge ${mergeId} not found`);
+ const branch = manifest.branches.find((b) => b.id === merge.branchId);
+ if (!branch) throw new Error(`Branch ${merge.branchId} not found`);
+ const pack = manifest.packs.find((p) => p.id === merge.packId);
+ if (!pack || !pack.basePackId) throw new Error(`Pack ${merge.packId} is not a fork`);
+
+ const now = nowIso();
+ for (const [docId, content] of Object.entries(input.resolvedContentByDocId)) {
+ const ourDoc = manifest.documents.find((d) => d.id === docId);
+ if (!ourDoc) continue;
+ const versionId = nanoid();
+ const contentKey = OBJECT_KEYS.documentVersionContent(docId, versionId);
+ const metadataKey = OBJECT_KEYS.documentVersionMetadata(docId, versionId);
+ const contentHash = computeContentHash(content);
+ const byteLength = Buffer.byteLength(content, "utf8");
+ await this.objects.putObject(contentKey, content, { immutable: true });
+ await this.objects.putObject(
+ metadataKey,
+ JSON.stringify({ contentHash, byteLength, createdAt: now, message: "resolve-conflict" }, null, 2),
+ { immutable: true },
+ );
+ const newVersion: LibraryDocumentVersionRecord = {
+ id: versionId,
+ docId,
+ packId: merge.packId,
+ parentVersionId: ourDoc.currentVersionId,
+ contentKey,
+ contentHash,
+ byteLength,
+ message: "resolve-conflict",
+ createdBy: input.resolvedBy ?? "",
+ createdAt: now,
+ };
+ manifest.versions.push(newVersion);
+ ourDoc.currentVersionId = versionId;
+ ourDoc.updatedAt = now;
+ branch.headVersionByDocId[docId] = versionId;
+ const baseDoc = manifest.documents.find(
+ (d) => d.packId === pack.basePackId && d.path === ourDoc.path && d.deletedAt === null,
+ );
+ if (baseDoc) {
+ branch.baseVersionByDocId[docId] = baseDoc.currentVersionId;
+ }
+
+ for (const conflict of manifest.conflicts.filter((c) => c.mergeId === mergeId && c.docId === docId)) {
+ conflict.resolved = true;
+ conflict.resolvedAt = now;
+ conflict.resolvedBy = input.resolvedBy ?? "";
+ conflict.resolutionContent = content;
+ }
+ }
+
+ const stillUnresolved = manifest.conflicts.some((c) => c.mergeId === mergeId && !c.resolved);
+ if (!stillUnresolved) {
+ merge.status = "resolved";
+ merge.completedAt = now;
+ }
+ branch.updatedAt = now;
+ pack.updatedAt = now;
+ await this.writeManifest(manifest);
+ return merge;
+ }
+
+ // ── Compare drafts to published ──────────────────────────────────────
+
+ async compareDraftToPublished(packId: string, packVersionId: string): Promise<{ docId: string; status: "added" | "modified" | "removed" | "unchanged" }[]> {
+ const manifest = await this.readManifest();
+ const pv = manifest.packVersions.find((p) => p.id === packVersionId && p.packId === packId);
+ if (!pv) throw new NotFoundError(`Pack version ${packVersionId} not found`);
+ const pvDocs = manifest.packVersionDocuments.filter((d) => d.packVersionId === packVersionId);
+ const pvByDocId = new Map(pvDocs.map((d) => [d.docId, d]));
+ const currentDocs = manifest.documents.filter((d) => d.packId === packId);
+ const result: { docId: string; status: "added" | "modified" | "removed" | "unchanged" }[] = [];
+ for (const doc of currentDocs) {
+ const pvDoc = pvByDocId.get(doc.id);
+ if (doc.deletedAt !== null && pvDoc) {
+ result.push({ docId: doc.id, status: "removed" });
+ } else if (!pvDoc && doc.deletedAt === null) {
+ result.push({ docId: doc.id, status: "added" });
+ } else if (pvDoc && pvDoc.versionId !== doc.currentVersionId) {
+ result.push({ docId: doc.id, status: "modified" });
+ } else if (pvDoc) {
+ result.push({ docId: doc.id, status: "unchanged" });
+ }
+ }
+ return result;
+ }
+}
+
+let singleton: LibraryStore | null = null;
+
+export function getLibraryStore(): LibraryStore {
+ singleton ??= new LibraryStore();
+ return singleton;
+}
+
+export function resetLibraryStoreForTests(): void {
+ singleton = null;
+}
+
+export { parseSkillFrontmatter };
diff --git a/src/lib/library-store/types.ts b/src/lib/library-store/types.ts
new file mode 100644
index 0000000..19454cd
--- /dev/null
+++ b/src/lib/library-store/types.ts
@@ -0,0 +1,267 @@
+export type LibraryScope = "workspace" | "user";
+
+export type DocumentRole =
+ | "skill-entrypoint"
+ | "reference"
+ | "doc"
+ | "rule"
+ | "template"
+ | "example"
+ | "asset"
+ | "script"
+ | "manifest";
+
+export interface LibraryRecord {
+ id: string;
+ workspaceId: string;
+ scope: LibraryScope;
+ ownerUserId: string | null;
+ createdAt: string;
+ updatedAt: string;
+ deletedAt: string | null;
+}
+
+export interface PackRecord {
+ id: string;
+ libraryId: string;
+ packKey: string;
+ name: string;
+ description: string;
+ tags: string[];
+ basePackId: string | null;
+ external: boolean;
+ currentBranchId: string;
+ createdBy: string;
+ createdAt: string;
+ updatedAt: string;
+ deletedAt: string | null;
+ metadata?: Record<string, unknown>;
+}
+
+export interface SkillRecord {
+ id: string;
+ packId: string;
+ skillKey: string;
+ name: string;
+ description: string;
+ entrypointDocId: string;
+ createdBy: string;
+ createdAt: string;
+ updatedAt: string;
+ deletedAt: string | null;
+ deprecated: boolean;
+ metadata?: Record<string, unknown>;
+}
+
+export interface LibraryDocumentRecord {
+ id: string;
+ packId: string;
+ role: DocumentRole;
+ path: string;
+ currentVersionId: string;
+ createdBy: string;
+ createdAt: string;
+ updatedAt: string;
+ deletedAt: string | null;
+ metadata?: Record<string, unknown>;
+}
+
+export interface LibraryDocumentVersionRecord {
+ id: string;
+ docId: string;
+ packId: string;
+ parentVersionId: string | null;
+ contentKey: string;
+ contentHash: string;
+ byteLength: number;
+ message: string;
+ createdBy: string;
+ createdAt: string;
+ metadata?: Record<string, unknown>;
+}
+
+export interface PackVersionRecord {
+ id: string;
+ packId: string;
+ version: string;
+ manifestKey: string;
+ manifestHash: string;
+ createdBy: string;
+ createdAt: string;
+ deprecated: boolean;
+ deletedAt: string | null;
+ notes: string;
+}
+
+export interface PackVersionDocumentRecord {
+ id: string;
+ packVersionId: string;
+ packId: string;
+ docId: string;
+ versionId: string;
+ role: DocumentRole;
+ path: string;
+ contentHash: string;
+}
+
+export interface SkillVersionRecord {
+ id: string;
+ skillId: string;
+ packId: string;
+ version: string;
+ packVersionId: string | null;
+ createdBy: string;
+ createdAt: string;
+ deprecated: boolean;
+ deletedAt: string | null;
+ notes: string;
+}
+
+export interface SkillVersionDocumentRecord {
+ id: string;
+ skillVersionId: string;
+ skillId: string;
+ docId: string;
+ versionId: string;
+ contentHash: string;
+}
+
+export interface BranchRecord {
+ id: string;
+ packId: string;
+ name: string;
+ baseVersionByDocId: Record<string, string>;
+ headVersionByDocId: Record<string, string>;
+ createdAt: string;
+ updatedAt: string;
+}
+
+export interface ConflictRecord {
+ id: string;
+ mergeId: string;
+ packId: string;
+ docId: string;
+ conflictType: "text_conflict" | "delete_edit" | "add_add";
+ ancestorContent: string | null;
+ baseContent: string | null;
+ branchContent: string | null;
+ resolved: boolean;
+ resolvedAt: string | null;
+ resolvedBy: string | null;
+ resolutionContent: string | null;
+}
+
+export interface MergeRecord {
+ id: string;
+ packId: string;
+ branchId: string;
+ basePackVersionId: string | null;
+ initiatedBy: string;
+ initiatedAt: string;
+ status: "clean" | "conflict" | "resolved";
+ mergedCleanlyDocs: string[];
+ conflictDocs: string[];
+ completedAt: string | null;
+}
+
+export interface LibraryManifest {
+ version: 1;
+ libraries: LibraryRecord[];
+ packs: PackRecord[];
+ skills: SkillRecord[];
+ documents: LibraryDocumentRecord[];
+ versions: LibraryDocumentVersionRecord[];
+ packVersions: PackVersionRecord[];
+ packVersionDocuments: PackVersionDocumentRecord[];
+ skillVersions: SkillVersionRecord[];
+ skillVersionDocuments: SkillVersionDocumentRecord[];
+ branches: BranchRecord[];
+ merges: MergeRecord[];
+ conflicts: ConflictRecord[];
+}
+
+export interface CreatePackInput {
+ packKey: string;
+ name: string;
+ description?: string;
+ tags?: string[];
+ createdBy?: string;
+ metadata?: Record<string, unknown>;
+}
+
+export interface CreateDocumentInput {
+ role: DocumentRole;
+ path: string;
+ content: string;
+ createdBy?: string;
+ message?: string;
+ metadata?: Record<string, unknown>;
+}
+
+export interface SaveDocumentVersionInput {
+ content: string;
+ previousVersionId: string | null;
+ message?: string;
+ createdBy?: string;
+ metadata?: Record<string, unknown>;
+}
+
+export interface CreateSkillInput {
+ skillKey: string;
+ name: string;
+ description: string;
+ entrypointDocId: string;
+ createdBy?: string;
+ metadata?: Record<string, unknown>;
+}
+
+export interface PublishPackVersionInput {
+ version: string;
+ notes?: string;
+ createdBy?: string;
+}
+
+export interface PublishSkillVersionInput {
+ version: string;
+ notes?: string;
+ createdBy?: string;
+ linkToLatestPackVersion?: boolean;
+}
+
+export interface ResolveLiveInput {
+ scope: LibraryScope;
+ packId: string;
+ packVersion: string | "draft";
+ skillId: string;
+}
+
+export interface SkillBundleDocument {
+ docId: string;
+ path: string;
+ role: DocumentRole;
+ content: string;
+ contentHash: string;
+}
+
+export interface SkillBundle {
+ scope: LibraryScope;
+ packId: string;
+ packKey: string;
+ packVersion: string;
+ skillId: string;
+ skillKey: string;
+ skillName: string;
+ description: string;
+ entrypoint: SkillBundleDocument;
+ documents: SkillBundleDocument[];
+ manifestHash: string;
+}
+
+export interface ValidationWarning {
+ level: "warning" | "error";
+ code: string;
+ message: string;
+ path?: string;
+ docId?: string;
+ skillId?: string;
+}
diff --git a/src/lib/library-store/validation.ts b/src/lib/library-store/validation.ts
new file mode 100644
index 0000000..06326e7
--- /dev/null
+++ b/src/lib/library-store/validation.ts
@@ -0,0 +1,210 @@
+import { skillFrontmatterSchema } from "./schemas";
+import type {
+ LibraryDocumentRecord,
+ PackRecord,
+ SkillRecord,
+ ValidationWarning,
+} from "./types";
+
+export interface ParsedFrontmatter {
+ data: Record<string, unknown> | null;
+ body: string;
+ raw: string;
+}
+
+const FRONTMATTER_REGEX = /^---\s*\n([\s\S]*?)\n---\s*\n?([\s\S]*)$/;
+
+function parseSimpleYaml(yaml: string): Record<string, unknown> {
+ const result: Record<string, unknown> = {};
+ let currentKey: string | null = null;
+ let currentObj: Record<string, string> | null = null;
+ for (const rawLine of yaml.split(/\r?\n/)) {
+ const line = rawLine.replace(/\s+$/, "");
+ if (!line.trim() || line.trim().startsWith("#")) continue;
+
+ const indented = /^\s+/.test(rawLine);
+ if (!indented) {
+ currentObj = null;
+ const m = /^([A-Za-z0-9_\-]+)\s*:\s*(.*)$/.exec(line);
+ if (!m) continue;
+ const key = m[1];
+ const value = m[2].trim();
+ currentKey = key;
+ if (value === "") {
+ const nested: Record<string, string> = {};
+ result[key] = nested;
+ currentObj = nested;
+ } else {
+ result[key] = stripQuotes(value);
+ }
+ } else if (currentKey && currentObj) {
+ const m = /^\s+([A-Za-z0-9_\-]+)\s*:\s*(.*)$/.exec(rawLine);
+ if (m) {
+ currentObj[m[1]] = stripQuotes(m[2].trim());
+ }
+ }
+ }
+ return result;
+}
+
+function stripQuotes(value: string): string {
+ if ((value.startsWith('"') && value.endsWith('"')) || (value.startsWith("'") && value.endsWith("'"))) {
+ return value.slice(1, -1);
+ }
+ return value;
+}
+
+export function parseFrontmatter(content: string): ParsedFrontmatter {
+ const match = FRONTMATTER_REGEX.exec(content);
+ if (!match) {
+ return { data: null, body: content, raw: "" };
+ }
+ return {
+ data: parseSimpleYaml(match[1]),
+ body: match[2],
+ raw: match[1],
+ };
+}
+
+export function parseSkillFrontmatter(content: string): {
+ data: ReturnType<typeof skillFrontmatterSchema.safeParse>;
+ body: string;
+} {
+ const parsed = parseFrontmatter(content);
+ if (!parsed.data) {
+ return {
+ data: skillFrontmatterSchema.safeParse({}),
+ body: parsed.body,
+ };
+ }
+ return {
+ data: skillFrontmatterSchema.safeParse(parsed.data),
+ body: parsed.body,
+ };
+}
+
+export interface ValidatePackInput {
+ pack: PackRecord;
+ skills: SkillRecord[];
+ documents: LibraryDocumentRecord[];
+ documentContents: Map<string, string>;
+ unresolvedMergeIds?: string[];
+}
+
+const RELATIVE_LINK_REGEX = /(?:!?\[[^\]]*\]\(\s*)([^)\s]+)(?:[^)]*\))/g;
+
+export function validatePack(input: ValidatePackInput): ValidationWarning[] {
+ const warnings: ValidationWarning[] = [];
+ const { pack, skills, documents, documentContents, unresolvedMergeIds = [] } = input;
+ const docById = new Map(documents.map((d) => [d.id, d]));
+ const docByPath = new Map(documents.filter((d) => d.deletedAt === null).map((d) => [d.path, d]));
+
+ const skillKeys = new Set<string>();
+ for (const skill of skills.filter((s) => s.deletedAt === null)) {
+ if (skillKeys.has(skill.skillKey)) {
+ warnings.push({
+ level: "error",
+ code: "duplicate_skill_id",
+ message: `Duplicate skill key: ${skill.skillKey}`,
+ skillId: skill.id,
+ });
+ }
+ skillKeys.add(skill.skillKey);
+
+ const entry = docById.get(skill.entrypointDocId);
+ if (!entry || entry.deletedAt !== null) {
+ warnings.push({
+ level: "error",
+ code: "missing_entrypoint",
+ message: `Skill "${skill.name}" is missing its SKILL.md entrypoint document`,
+ skillId: skill.id,
+ });
+ continue;
+ }
+
+ const content = documentContents.get(entry.id) ?? "";
+ const fm = parseSkillFrontmatter(content);
+ if (!fm.data.success) {
+ warnings.push({
+ level: "error",
+ code: "invalid_frontmatter",
+ message: `Skill "${skill.name}" has invalid SKILL.md frontmatter: ${fm.data.error.issues[0]?.message ?? "unknown"}`,
+ skillId: skill.id,
+ docId: entry.id,
+ path: entry.path,
+ });
+ } else if (!fm.data.data.description?.trim()) {
+ warnings.push({
+ level: "warning",
+ code: "missing_description",
+ message: `Skill "${skill.name}" has no description in frontmatter`,
+ skillId: skill.id,
+ docId: entry.id,
+ path: entry.path,
+ });
+ }
+ }
+
+ const docPaths = new Set<string>();
+ for (const doc of documents.filter((d) => d.deletedAt === null)) {
+ if (docPaths.has(doc.path)) {
+ warnings.push({
+ level: "warning",
+ code: "duplicate_document_path",
+ message: `Duplicate document path: ${doc.path}`,
+ docId: doc.id,
+ path: doc.path,
+ });
+ }
+ docPaths.add(doc.path);
+
+ const content = documentContents.get(doc.id) ?? "";
+ let m: RegExpExecArray | null;
+ RELATIVE_LINK_REGEX.lastIndex = 0;
+ while ((m = RELATIVE_LINK_REGEX.exec(content)) !== null) {
+ const target = m[1];
+ if (/^https?:/i.test(target) || target.startsWith("#") || target.startsWith("data:")) continue;
+ const normalized = target.replace(/^\.\//, "");
+ if (!docByPath.has(normalized)) {
+ warnings.push({
+ level: "warning",
+ code: "broken_reference",
+ message: `Document "${doc.path}" references missing path "${target}"`,
+ docId: doc.id,
+ path: doc.path,
+ });
+ }
+ }
+ }
+
+ if (!pack.packKey) {
+ warnings.push({
+ level: "error",
+ code: "missing_pack_key",
+ message: "Pack has no packKey",
+ });
+ }
+
+ for (const skill of skills.filter((s) => s.deletedAt === null)) {
+ const entry = docById.get(skill.entrypointDocId);
+ if (entry?.deletedAt) {
+ warnings.push({
+ level: "error",
+ code: "deleted_doc_referenced",
+ message: `Skill "${skill.name}" references deleted document`,
+ skillId: skill.id,
+ docId: entry.id,
+ });
+ }
+ }
+
+ for (const mergeId of unresolvedMergeIds) {
+ warnings.push({
+ level: "error",
+ code: "unresolved_merge",
+ message: `Pack has unresolved merge ${mergeId}`,
+ });
+ }
+
+ return warnings;
+}
diff --git a/src/nodes/skill/__tests__/generator.test.ts b/src/nodes/skill/__tests__/generator.test.ts
new file mode 100644
index 0000000..463720a
--- /dev/null
+++ b/src/nodes/skill/__tests__/generator.test.ts
@@ -0,0 +1,78 @@
+import { describe, expect, it } from "bun:test";
+import { generator } from "@/nodes/skill/generator";
+import type { SkillNodeData } from "@/nodes/skill/types";
+import { WorkflowNodeType } from "@/types/workflow";
+import type { SkillBundle } from "@/types/library";
+
+function buildData(overrides: Partial<SkillNodeData> = {}): SkillNodeData {
+ return {
+ type: WorkflowNodeType.Skill,
+ label: "Skill",
+ name: "n1",
+ skillName: "test-skill",
+ description: "desc",
+ promptText: "inline body",
+ detectedVariables: [],
+ variableMappings: {},
+ metadata: [],
+ libraryRef: null,
+ ...overrides,
+ };
+}
+
+describe("skill generator", () => {
+ it("emits inline content when no libraryRef", () => {
+ const data = buildData();
+ const file = generator.getSkillFile?.("n1", data);
+ expect(file).not.toBeNull();
+ expect(file?.path).toContain("/skills/test-skill/SKILL.md");
+ expect(file?.content).toContain("inline body");
+ expect(file?.content).toContain("name: test-skill");
+ });
+
+ it("uses resolved bundle content when libraryRef is set", () => {
+ const data = buildData({
+ libraryRef: {
+ scope: "workspace",
+ packId: "p1",
+ packKey: "support",
+ packVersion: "1.0.0",
+ skillId: "s1",
+ skillKey: "triage",
+ },
+ });
+ const bundle: SkillBundle = {
+ scope: "workspace",
+ packId: "p1",
+ packKey: "support",
+ packVersion: "1.0.0",
+ skillId: "s1",
+ skillKey: "triage",
+ skillName: "Triage",
+ description: "Triage skill",
+ entrypoint: { docId: "d1", path: "SKILL.md", role: "skill-entrypoint", content: "---\nname: triage\n---\nfrom-pack-content", contentHash: "" },
+ documents: [],
+ manifestHash: "",
+ };
+ const file = generator.getSkillFile?.("n1", data, { resolvedBundle: bundle });
+ expect(file?.content).toContain("from-pack-content");
+ expect(file?.path).toContain("/skills/triage/SKILL.md");
+ });
+
+ it("falls back to inline when libraryRef present but no bundle resolved", () => {
+ const data = buildData({
+ libraryRef: { scope: "workspace", packId: "p1", packVersion: "draft", skillId: "s1" },
+ });
+ const file = generator.getSkillFile?.("n1", data);
+ expect(file?.content).toContain("inline body");
+ });
+
+ it("getDetailsSection mentions library reference when present", () => {
+ const data = buildData({
+ libraryRef: { scope: "workspace", packId: "p1", packKey: "support", packVersion: "1.0.0", skillId: "s1", skillKey: "triage" },
+ });
+ const section = generator.getDetailsSection?.("n1", data);
+ expect(section).toContain("Library Reference");
+ expect(section).toContain("support");
+ });
+});
diff --git a/src/nodes/skill/constants.ts b/src/nodes/skill/constants.ts
index b6fef2f..8b5cb10 100644
--- a/src/nodes/skill/constants.ts
+++ b/src/nodes/skill/constants.ts
@@ -16,6 +16,7 @@ export const skillRegistryEntry: NodeRegistryEntry = {
defaultData: (): SkillNodeData => ({
type: WorkflowNodeType.Skill, label: "Skill", name: "", skillName: "",
description: "", promptText: "", detectedVariables: [], variableMappings: {}, metadata: [],
+ libraryRef: null,
}),
aiGenerationPrompt: {
description: "Reusable knowledge/instruction unit that gets attached to agents. Skills represent specialised capabilities the agent should have. A skill node generates a `.opencode/skills//SKILL.md` file.",
@@ -99,4 +100,16 @@ export const skillSchema = z.object({
})
)
.default([]),
+ libraryRef: z
+ .object({
+ scope: z.enum(["workspace", "user"]),
+ packId: z.string(),
+ packKey: z.string().optional(),
+ packVersion: z.string(),
+ skillId: z.string(),
+ skillKey: z.string().optional(),
+ skillName: z.string().optional(),
+ })
+ .nullable()
+ .default(null),
});
diff --git a/src/nodes/skill/fields.tsx b/src/nodes/skill/fields.tsx
index ed18341..5eebdd0 100644
--- a/src/nodes/skill/fields.tsx
+++ b/src/nodes/skill/fields.tsx
@@ -17,8 +17,9 @@ import { useAutoResourceVariableMapping } from "@/nodes/shared/use-auto-resource
import { useDetectedVariables } from "@/nodes/shared/use-detected-variables";
import { useConnectedResources } from "@/nodes/shared/use-connected-resources";
import { WorkflowNodeType } from "@/types/workflow";
-import type { SkillMetadataEntry } from "./types";
+import type { SkillMetadataEntry, SkillLibraryRef } from "./types";
import { buildSkillScriptRelativePath, getSkillScriptBaseName, getSkillScriptFileName } from "./script-utils";
+import { LibraryRefSection } from "@/components/workflow/properties/skill-picker-dialog";
const SLUG_REGEX = /^[a-z0-9]+(-[a-z0-9]+)*$/;
const isValidSlug = (v: string) => { const t = v.trim(); return t === "" || SLUG_REGEX.test(t); };
@@ -44,6 +45,7 @@ export function Fields({ register, control, setValue, nodeId }: SkillFieldsProps
[rawVariableMappings],
);
const metadata: SkillMetadataEntry[] = useWatch({ control, name: "metadata" }) ?? [];
+ const libraryRef = useWatch({ control, name: "libraryRef" }) as SkillLibraryRef | null | undefined;
const { connectedScripts, availableResources, deleteEdge } = useConnectedResources(nodeId);
const skillFolder = (skillName.trim() || label.trim() || "skill")
.toLowerCase()
@@ -89,6 +91,12 @@ export function Fields({ register, control, setValue, nodeId }: SkillFieldsProps
return (
+
+ setValue("libraryRef" as never, value as never, { shouldDirty: true })
+ }
+ />
Skill Name
diff --git a/src/nodes/skill/generator.ts b/src/nodes/skill/generator.ts
index 16b470c..b04626e 100644
--- a/src/nodes/skill/generator.ts
+++ b/src/nodes/skill/generator.ts
@@ -1,6 +1,7 @@
import type { NodeGeneratorModule } from "@/nodes/shared/registry-types";
import { mermaidId, mermaidLabel } from "@/nodes/shared/mermaid-utils";
import type { WorkflowNodeData } from "@/types/workflow";
+import type { SkillBundle } from "@/types/library";
import {
buildGeneratedSkillFilePath,
DEFAULT_GENERATION_TARGET,
@@ -111,11 +112,17 @@ function buildSkillFile(
return lines.join("\n") + "\n";
}
+export interface SkillGeneratorOptions {
+ connectedScripts?: ConnectedSkillScript[];
+ target?: GenerationTargetId;
+ resolvedBundle?: SkillBundle | null;
+}
+
export const generator: NodeGeneratorModule & {
getSkillFile?(
nodeId: string,
data: WorkflowNodeData,
- connectedScripts?: ConnectedSkillScript[] | GenerationTargetId,
+ connectedScripts?: ConnectedSkillScript[] | GenerationTargetId | SkillGeneratorOptions,
target?: GenerationTargetId,
): { path: string; content: string } | null;
} = {
@@ -125,25 +132,47 @@ export const generator: NodeGeneratorModule & {
},
getDetailsSection(nodeId: string, data: WorkflowNodeData): string {
const d = data as SkillNodeData;
- return [
+ const lines = [
`#### Skill: ${d.label || d.name}`,
"",
`- **Skill Name:** ${d.skillName || "_not set_"}`,
- ].join("\n");
+ ];
+ if (d.libraryRef) {
+ lines.push(`- **Library Reference:** ${d.libraryRef.scope}/${d.libraryRef.packKey ?? d.libraryRef.packId}@${d.libraryRef.packVersion} → ${d.libraryRef.skillKey ?? d.libraryRef.skillId}`);
+ }
+ return lines.join("\n");
},
getSkillFile(
_nodeId: string,
data: WorkflowNodeData,
- connectedScriptsOrTarget?: ConnectedSkillScript[] | GenerationTargetId,
+ connectedScriptsOrTarget?: ConnectedSkillScript[] | GenerationTargetId | SkillGeneratorOptions,
target: GenerationTargetId = DEFAULT_GENERATION_TARGET,
) {
const d = data as SkillNodeData;
+ let connectedScripts: ConnectedSkillScript[] = [];
+ let resolvedTarget: GenerationTargetId = target ?? DEFAULT_GENERATION_TARGET;
+ let resolvedBundle: SkillBundle | null = null;
+
+ if (Array.isArray(connectedScriptsOrTarget)) {
+ connectedScripts = connectedScriptsOrTarget;
+ } else if (typeof connectedScriptsOrTarget === "string") {
+ resolvedTarget = connectedScriptsOrTarget;
+ } else if (connectedScriptsOrTarget && typeof connectedScriptsOrTarget === "object") {
+ connectedScripts = connectedScriptsOrTarget.connectedScripts ?? [];
+ resolvedTarget = connectedScriptsOrTarget.target ?? resolvedTarget;
+ resolvedBundle = connectedScriptsOrTarget.resolvedBundle ?? null;
+ }
+
+ if (resolvedBundle && d.libraryRef) {
+ const skillName = resolvedBundle.skillKey || resolveSkillSlug(d) || "skill";
+ return {
+ path: buildGeneratedSkillFilePath(skillName, resolvedTarget),
+ content: resolvedBundle.entrypoint.content,
+ };
+ }
+
const skillName = resolveSkillSlug(d);
if (!skillName) return null;
- const connectedScripts = Array.isArray(connectedScriptsOrTarget) ? connectedScriptsOrTarget : [];
- const resolvedTarget = Array.isArray(connectedScriptsOrTarget)
- ? target
- : (connectedScriptsOrTarget ?? target ?? DEFAULT_GENERATION_TARGET);
return {
path: buildGeneratedSkillFilePath(skillName, resolvedTarget),
content: buildSkillFile(skillName, d, connectedScripts, resolvedTarget),
diff --git a/src/nodes/skill/node.tsx b/src/nodes/skill/node.tsx
index b329d84..a06bcbe 100644
--- a/src/nodes/skill/node.tsx
+++ b/src/nodes/skill/node.tsx
@@ -5,7 +5,7 @@ import { BaseNode, NodeSize } from "@/nodes/shared/base-node";
import { detectVarCounts } from "@/nodes/shared/variable-utils";
import { HANDLE_CLASS } from "@/lib/theme";
import { NODE_ACCENT } from "@/lib/node-colors";
-import { Braces, DollarSign, FileCode2, Link2, Sparkles, Zap } from "lucide-react";
+import { Braces, DollarSign, FileCode2, Library, Link2, Sparkles, Zap } from "lucide-react";
import { skillRegistryEntry } from "./constants";
import type { SkillNodeData } from "./types";
import { useWorkflowStore } from "@/store/workflow";
@@ -52,6 +52,14 @@ export const SkillNode = memo(function SkillNode({ id, data, selected }: NodePro
return (
+ {data.libraryRef && (
+
+
+
+ {data.libraryRef.scope}/{data.libraryRef.packKey ?? data.libraryRef.packId}@{data.libraryRef.packVersion}
+
+
+ )}
{!!data.description?.trim() && (
{data.description}
)}
diff --git a/src/nodes/skill/types.ts b/src/nodes/skill/types.ts
index f511762..1672c82 100644
--- a/src/nodes/skill/types.ts
+++ b/src/nodes/skill/types.ts
@@ -1,10 +1,21 @@
import { WorkflowNodeType } from "@/types/workflow";
+import type { LibraryScope } from "@/types/library";
export interface SkillMetadataEntry {
key: string;
value: string;
}
+export interface SkillLibraryRef {
+ scope: LibraryScope;
+ packId: string;
+ packKey?: string;
+ packVersion: string | "draft";
+ skillId: string;
+ skillKey?: string;
+ skillName?: string;
+}
+
export interface SkillNodeData extends Record<string, unknown>
{
type: WorkflowNodeType.Skill;
label: string;
@@ -15,4 +26,5 @@ export interface SkillNodeData extends Record {
detectedVariables: string[];
variableMappings: Record;
metadata: SkillMetadataEntry[];
+ libraryRef?: SkillLibraryRef | null;
}
diff --git a/src/store/__tests__/library-docs.test.ts b/src/store/__tests__/library-docs.test.ts
new file mode 100644
index 0000000..0b808da
--- /dev/null
+++ b/src/store/__tests__/library-docs.test.ts
@@ -0,0 +1,64 @@
+import { describe, expect, it, beforeEach, mock } from "bun:test";
+
+const originalFetch = globalThis.fetch;
+
+describe("useLibraryDocsStore", () => {
+ beforeEach(() => {
+ mock.module("@/lib/library-client", () => ({
+ libraryBootstrap: async () => ({ workspaceId: "ws-1", ownerUserId: null, libraries: [] }),
+ listPacksForScope: async () => [],
+ createPack: async () => ({ id: "p1", libraryId: "l1", packKey: "p", name: "P" }),
+ forkPack: async () => ({ id: "f1", libraryId: "l2", packKey: "p-fork" }),
+ softDeletePack: async () => undefined,
+ listDocuments: async () => [],
+ listSkills: async () => [],
+ listPackVersions: async () => [],
+ listSkillVersions: async () => [],
+ createDocument: async () => ({ document: { id: "d1", currentVersionId: "v1" }, version: { id: "v1" } }),
+ saveDocumentVersion: async () => ({ id: "v2" }),
+ deleteDocument: async () => undefined,
+ getDocumentVersionContent: async () => "content",
+ createSkill: async () => ({ id: "s1" }),
+ deleteSkill: async () => undefined,
+ publishPackVersion: async () => ({ id: "pv1", version: "1.0.0" }),
+ publishSkillVersion: async () => ({ id: "sv1", version: "1.0.0" }),
+ mergeBaseIntoBranch: async () => ({ id: "m1", status: "clean", conflictDocs: [], mergedCleanlyDocs: [] }),
+ listMergeConflicts: async () => [],
+ resolveMergeConflict: async () => ({ id: "m1", status: "resolved" }),
+ validatePack: async () => [],
+ resolveLiveSkill: async () => null,
+ exportNexusArchive: async () => new Blob(),
+ importNexusArchive: async () => [],
+ listDocumentVersions: async () => [],
+ }));
+ });
+
+ it("bootstrap sets workspaceId and marks bootstrapped", async () => {
+ const { useLibraryDocsStore } = await import("@/store/library-docs/store");
+ await useLibraryDocsStore.getState().bootstrap();
+ expect(useLibraryDocsStore.getState().bootstrapped).toBe(true);
+ expect(useLibraryDocsStore.getState().workspaceId).toBe("ws-1");
+ });
+
+ it("createPack invokes API and triggers refresh", async () => {
+ const { useLibraryDocsStore } = await import("@/store/library-docs/store");
+ await useLibraryDocsStore.getState().bootstrap();
+ const pack = await useLibraryDocsStore.getState().createPack("workspace", "p", "P");
+ expect(pack.id).toBe("p1");
+ });
+
+ it("selectPack updates state", async () => {
+ const { useLibraryDocsStore } = await import("@/store/library-docs/store");
+ useLibraryDocsStore.getState().selectPack("p1");
+ expect(useLibraryDocsStore.getState().selectedPackId).toBe("p1");
+ });
+
+ it("mergeBase records pending merge", async () => {
+ const { useLibraryDocsStore } = await import("@/store/library-docs/store");
+ const merge = await useLibraryDocsStore.getState().mergeBase("p1");
+ expect(merge.status).toBe("clean");
+ expect(useLibraryDocsStore.getState().pendingMerges["p1"].id).toBe("m1");
+ });
+});
+
+if (originalFetch) globalThis.fetch = originalFetch;
diff --git a/src/store/library-docs/index.ts b/src/store/library-docs/index.ts
new file mode 100644
index 0000000..0b35be6
--- /dev/null
+++ b/src/store/library-docs/index.ts
@@ -0,0 +1 @@
+export { useLibraryDocsStore } from "./store";
diff --git a/src/store/library-docs/store.ts b/src/store/library-docs/store.ts
new file mode 100644
index 0000000..f850dd8
--- /dev/null
+++ b/src/store/library-docs/store.ts
@@ -0,0 +1,298 @@
+"use client";
+
+import { create } from "zustand";
+import {
+ createDocument as apiCreateDocument,
+ createPack as apiCreatePack,
+ createSkill as apiCreateSkill,
+ deleteDocument as apiDeleteDocument,
+ deleteSkill as apiDeleteSkill,
+ exportNexusArchive as apiExport,
+ forkPack as apiForkPack,
+ getDocumentVersionContent as apiGetVersionContent,
+ importNexusArchive as apiImport,
+ libraryBootstrap as apiBootstrap,
+ listDocuments as apiListDocuments,
+ listDocumentVersions as apiListVersions,
+ listMergeConflicts as apiListConflicts,
+ listPackVersions as apiListPackVersions,
+ listPacksForScope as apiListPacks,
+ listSkills as apiListSkills,
+ listSkillVersions as apiListSkillVersions,
+ mergeBaseIntoBranch as apiMergeBase,
+ publishPackVersion as apiPublishPack,
+ publishSkillVersion as apiPublishSkill,
+ resolveLiveSkill as apiResolveLive,
+ resolveMergeConflict as apiResolveConflict,
+ saveDocumentVersion as apiSaveVersion,
+ softDeletePack as apiSoftDeletePack,
+ validatePack as apiValidatePack,
+} from "@/lib/library-client";
+import type {
+ ConflictRecord,
+ LibraryDocumentRecord,
+ LibraryDocumentVersionRecord,
+ MergeRecord,
+ PackRecord,
+ PackVersionRecord,
+ SkillRecord,
+ SkillVersionRecord,
+} from "@/lib/library-store/types";
+import type {
+ LibraryScope,
+ SkillBundle,
+ SkillRef,
+ ValidationWarning,
+} from "@/types/library";
+
+interface LibraryDocsState {
+  bootstrapped: boolean;
+  workspaceId: string | null;
+
+  workspacePacks: PackRecord[];
+  userPacks: PackRecord[];
+
+  selectedPackId: string | null;
+  documents: Record<string, LibraryDocumentRecord[]>;
+  skills: Record<string, SkillRecord[]>;
+  packVersions: Record<string, PackVersionRecord[]>;
+  skillVersions: Record<string, SkillVersionRecord[]>;
+  documentVersions: Record<string, LibraryDocumentVersionRecord[]>;
+  documentContent: Record<string, string>;
+  validationWarnings: Record<string, ValidationWarning[]>;
+  pendingMerges: Record<string, MergeRecord>;
+  conflicts: Record<string, ConflictRecord[]>;
+
+  loading: boolean;
+  saving: boolean;
+  error: string | null;
+
+  bootstrap: () => Promise<void>;
+  refreshPacks: (scope: LibraryScope) => Promise<void>;
+  createPack: (scope: LibraryScope, packKey: string, name: string, description?: string) => Promise<PackRecord>;
+  forkPack: (packId: string, targetScope?: LibraryScope) => Promise<PackRecord>;
+  softDeletePack: (packId: string) => Promise<void>;
+
+  selectPack: (packId: string | null) => void;
+  loadPackDetail: (packId: string) => Promise<void>;
+
+  createDocument: (packId: string, payload: { role: LibraryDocumentRecord["role"]; path: string; content: string }) => Promise<LibraryDocumentRecord>;
+  saveDocument: (packId: string, docId: string, content: string, previousVersionId: string | null, message?: string) => Promise<LibraryDocumentVersionRecord>;
+  deleteDocument: (packId: string, docId: string) => Promise<void>;
+  loadDocumentContent: (packId: string, docId: string, versionId: string) => Promise<string>;
+
+  createSkill: (packId: string, payload: { skillKey: string; name: string; description: string; entrypointDocId: string }) => Promise<SkillRecord>;
+  deleteSkill: (packId: string, skillId: string) => Promise<void>;
+
+  publishPack: (packId: string, version: string, notes?: string) => Promise<PackVersionRecord>;
+  publishSkill: (packId: string, skillId: string, version: string, notes?: string) => Promise<SkillVersionRecord>;
+
+  mergeBase: (packId: string) => Promise<MergeRecord>;
+  loadConflicts: (packId: string, mergeId: string) => Promise<ConflictRecord[]>;
+  resolveConflict: (packId: string, mergeId: string, resolved: Record<string, string>) => Promise<MergeRecord>;
+
+  validatePackById: (packId: string) => Promise<ValidationWarning[]>;
+  resolveLiveSkill: (ref: SkillRef) => Promise<SkillBundle | null>;
+
+  exportArchive: (workflowJson: unknown, workflowName: string) => Promise<Blob>;
+  importArchive: (file: File, scope?: LibraryScope) => Promise<PackRecord[]>;
+}
+
+export const useLibraryDocsStore = create<LibraryDocsState>((set, get) => ({
+ bootstrapped: false,
+ workspaceId: null,
+ workspacePacks: [],
+ userPacks: [],
+ selectedPackId: null,
+ documents: {},
+ skills: {},
+ packVersions: {},
+ skillVersions: {},
+ documentVersions: {},
+ documentContent: {},
+ validationWarnings: {},
+ pendingMerges: {},
+ conflicts: {},
+ loading: false,
+ saving: false,
+ error: null,
+
+ bootstrap: async () => {
+ if (get().bootstrapped) return;
+ set({ loading: true, error: null });
+ try {
+ const session = await apiBootstrap();
+ set({ workspaceId: session.workspaceId, bootstrapped: true });
+ await Promise.all([get().refreshPacks("workspace"), get().refreshPacks("user")]);
+ } catch (err) {
+ set({ error: (err as Error).message });
+ } finally {
+ set({ loading: false });
+ }
+ },
+
+ refreshPacks: async (scope) => {
+ const packs = await apiListPacks(scope);
+ if (scope === "workspace") set({ workspacePacks: packs });
+ else set({ userPacks: packs });
+ },
+
+ createPack: async (scope, packKey, name, description) => {
+ set({ saving: true });
+ try {
+ const pack = await apiCreatePack(scope, packKey, name, description);
+ await get().refreshPacks(scope);
+ return pack;
+ } finally {
+ set({ saving: false });
+ }
+ },
+
+ forkPack: async (packId, targetScope = "user") => {
+ set({ saving: true });
+ try {
+ const fork = await apiForkPack(packId, targetScope);
+ await get().refreshPacks(targetScope);
+ return fork;
+ } finally {
+ set({ saving: false });
+ }
+ },
+
+ softDeletePack: async (packId) => {
+ await apiSoftDeletePack(packId);
+ await Promise.all([get().refreshPacks("workspace"), get().refreshPacks("user")]);
+ },
+
+ selectPack: (packId) => set({ selectedPackId: packId }),
+
+ loadPackDetail: async (packId) => {
+ set({ loading: true });
+ try {
+ const [documents, skills, packVersions] = await Promise.all([
+ apiListDocuments(packId),
+ apiListSkills(packId),
+ apiListPackVersions(packId),
+ ]);
+ set((state) => ({
+ documents: { ...state.documents, [packId]: documents },
+ skills: { ...state.skills, [packId]: skills },
+ packVersions: { ...state.packVersions, [packId]: packVersions },
+ }));
+      const skillVersionsForPack: Record<string, SkillVersionRecord[]> = { ...get().skillVersions };
+ for (const skill of skills) {
+ skillVersionsForPack[skill.id] = await apiListSkillVersions(packId, skill.id);
+ }
+ set({ skillVersions: skillVersionsForPack });
+ } finally {
+ set({ loading: false });
+ }
+ },
+
+ createDocument: async (packId, payload) => {
+ set({ saving: true });
+ try {
+ const { document } = await apiCreateDocument(packId, payload);
+ await get().loadPackDetail(packId);
+ return document;
+ } finally {
+ set({ saving: false });
+ }
+ },
+
+ saveDocument: async (packId, docId, content, previousVersionId, message) => {
+ set({ saving: true });
+ try {
+ const version = await apiSaveVersion(packId, docId, { content, previousVersionId, message });
+ const versions = await apiListVersions(packId, docId);
+ set((state) => ({
+ documentVersions: { ...state.documentVersions, [docId]: versions },
+ documentContent: { ...state.documentContent, [docId]: content },
+ }));
+ await get().loadPackDetail(packId);
+ return version;
+ } finally {
+ set({ saving: false });
+ }
+ },
+
+ deleteDocument: async (packId, docId) => {
+ await apiDeleteDocument(packId, docId);
+ await get().loadPackDetail(packId);
+ },
+
+ loadDocumentContent: async (packId, docId, versionId) => {
+ const content = await apiGetVersionContent(packId, docId, versionId);
+ set((state) => ({ documentContent: { ...state.documentContent, [docId]: content } }));
+ return content;
+ },
+
+ createSkill: async (packId, payload) => {
+ const skill = await apiCreateSkill(packId, payload);
+ await get().loadPackDetail(packId);
+ return skill;
+ },
+
+ deleteSkill: async (packId, skillId) => {
+ await apiDeleteSkill(packId, skillId);
+ await get().loadPackDetail(packId);
+ },
+
+ publishPack: async (packId, version, notes) => {
+ const packVersion = await apiPublishPack(packId, { version, notes });
+ await get().loadPackDetail(packId);
+ return packVersion;
+ },
+
+ publishSkill: async (packId, skillId, version, notes) => {
+ const skillVersion = await apiPublishSkill(packId, skillId, { version, notes });
+ await get().loadPackDetail(packId);
+ return skillVersion;
+ },
+
+ mergeBase: async (packId) => {
+ const merge = await apiMergeBase(packId);
+ set((state) => ({ pendingMerges: { ...state.pendingMerges, [packId]: merge } }));
+ if (merge.status === "conflict") {
+ const conflicts = await apiListConflicts(packId, merge.id);
+ set((state) => ({ conflicts: { ...state.conflicts, [merge.id]: conflicts } }));
+ }
+ await get().loadPackDetail(packId);
+ return merge;
+ },
+
+ loadConflicts: async (packId, mergeId) => {
+ const conflicts = await apiListConflicts(packId, mergeId);
+ set((state) => ({ conflicts: { ...state.conflicts, [mergeId]: conflicts } }));
+ return conflicts;
+ },
+
+ resolveConflict: async (packId, mergeId, resolved) => {
+ const merge = await apiResolveConflict(packId, mergeId, { resolvedContentByDocId: resolved });
+ set((state) => ({
+ pendingMerges: { ...state.pendingMerges, [packId]: merge },
+ }));
+ await get().loadPackDetail(packId);
+ return merge;
+ },
+
+ validatePackById: async (packId) => {
+ const warnings = await apiValidatePack(packId);
+ set((state) => ({ validationWarnings: { ...state.validationWarnings, [packId]: warnings } }));
+ return warnings;
+ },
+
+ resolveLiveSkill: async (ref) => {
+ return apiResolveLive(ref);
+ },
+
+ exportArchive: async (workflowJson, workflowName) => {
+ return apiExport(workflowJson, workflowName);
+ },
+
+ importArchive: async (file, scope = "workspace") => {
+ const packs = await apiImport(file, scope);
+ await Promise.all([get().refreshPacks("workspace"), get().refreshPacks("user")]);
+ return packs;
+ },
+}));
diff --git a/src/types/library.ts b/src/types/library.ts
new file mode 100644
index 0000000..fc4a8c7
--- /dev/null
+++ b/src/types/library.ts
@@ -0,0 +1,59 @@
+export type LibraryScope = "workspace" | "user";
+
+export type DocumentRole =
+ | "skill-entrypoint"
+ | "reference"
+ | "doc"
+ | "rule"
+ | "template"
+ | "example"
+ | "asset"
+ | "script"
+ | "manifest";
+
+export interface PackRef {
+ scope: LibraryScope;
+ packId: string;
+ packKey: string;
+ packVersion: string | "draft";
+}
+
+export interface SkillRef {
+ scope: LibraryScope;
+ packId: string;
+ packVersion: string | "draft";
+ skillId: string;
+}
+
+export interface SkillBundleDocument {
+ docId: string;
+ path: string;
+ role: DocumentRole;
+ content: string;
+ contentHash: string;
+}
+
+export interface SkillBundle {
+ scope: LibraryScope;
+ packId: string;
+ packKey: string;
+ packVersion: string;
+ skillId: string;
+ skillKey: string;
+ skillName: string;
+ description: string;
+ entrypoint: SkillBundleDocument;
+ documents: SkillBundleDocument[];
+ manifestHash: string;
+}
+
+export interface ValidationWarning {
+ level: "warning" | "error";
+ code: string;
+ message: string;
+ path?: string;
+ docId?: string;
+ skillId?: string;
+}
+
+export type MergeState = "clean" | "conflict" | "resolved";
diff --git a/src/types/workflow.ts b/src/types/workflow.ts
index 6d7834a..1d0bc5c 100644
--- a/src/types/workflow.ts
+++ b/src/types/workflow.ts
@@ -96,6 +96,16 @@ export interface SubWorkflowNodeData extends BaseNodeData {
disabledTools: string[];
}
+export interface SkillLibraryRef {
+ scope: "workspace" | "user";
+ packId: string;
+ packKey?: string;
+ packVersion: string | "draft";
+ skillId: string;
+ skillKey?: string;
+ skillName?: string;
+}
+
export interface SkillNodeData extends BaseNodeData {
type: WorkflowNodeType.Skill;
skillName: string;
@@ -105,6 +115,8 @@ export interface SkillNodeData extends BaseNodeData {
/** Static variable mappings: {{varName}} → script ref (e.g. "script:lint-fix.ts") */
variableMappings: Record;
metadata: Array<{ key: string; value: string }>;
+ /** Optional reference to a library-managed skill */
+ libraryRef?: SkillLibraryRef | null;
}
export type DocumentContentMode = "inline" | "linked" | "brain";
diff --git a/untitled-workflow.nexus b/untitled-workflow.nexus
new file mode 100644
index 0000000..195b2cf
Binary files /dev/null and b/untitled-workflow.nexus differ