diff --git a/CLAUDE.md b/CLAUDE.md index 5aba393..34eccdf 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -60,6 +60,7 @@ Keep the mental model high-level: - `src/nodes/` — feature modules for each node type (schema, fields, generator, helpers) - `src/store/` — Zustand stores and workflow-generation state - `src/lib/` — cross-cutting utilities: persistence, generation, registries, validation, OpenCode client, marketplace helpers +- `src/lib/storage/` — pluggable storage provider abstraction (interface, local filesystem impl, factory) - `src/hooks/` — reusable editor and data hooks - `src/types/` — shared type definitions - `docs/tasks/` — task-specific plans and notes @@ -88,6 +89,13 @@ Keep the mental model high-level: - Export targets currently include `OpenCode`, `PI`, and `Claude Code`. - Generated output names are sanitized from workflow or node names; preserve existing sanitization helpers rather than duplicating naming logic. +### Storage abstraction +- All server-side file I/O flows through a pluggable `StorageProvider` interface defined in `src/lib/storage/`. +- The default provider is `LocalFilesystemProvider` (local filesystem). Selection is driven by the `NEXUS_STORAGE_PROVIDER` env var (default: `"local"`), with `NEXUS_STORAGE_ROOT` controlling the base path. +- Use `getStorageProvider()` from `@/lib/storage` to obtain the active provider. Do not import `node:fs/promises` directly for storage operations. +- To add a new storage backend (S3, Azure Blob, etc.), implement the `StorageProvider` interface and register it via `registerStorageProvider()`. +- `workspace/server.ts`, `workspace/snapshots.ts`, `brain/server.ts`, and `collaboration/object-store.ts` all use the storage abstraction. + ### OpenCode integration - OpenCode support is optional. - Keep offline/editor-only flows working even when OpenCode is disconnected. 
diff --git a/docs/tasks/feature-generic-file-storage-b8dbb4a6/e2e-feature-generic-file-storage-b8dbb4a6.md b/docs/tasks/feature-generic-file-storage-b8dbb4a6/e2e-feature-generic-file-storage-b8dbb4a6.md new file mode 100644 index 0000000..4bf61f1 --- /dev/null +++ b/docs/tasks/feature-generic-file-storage-b8dbb4a6/e2e-feature-generic-file-storage-b8dbb4a6.md @@ -0,0 +1,46 @@ +# E2E Test Specification: Generic File Storage + +## User Story + +As a user, I can create workspaces, save workflows, and perform all storage operations without noticing any change — the generic storage layer is transparent. + +## Preconditions + +- Application is running at `http://localhost:3000` +- No `NEXUS_STORAGE_PROVIDER` env var is set (defaults to `"local"`) +- The `.nexus-brain` data directory is clean or in a known state + +## Test Steps + +1. **Navigate to the app** at `http://localhost:3000` +2. **Create a new workspace** via the workspace picker + - Screenshot: after workspace creation +3. **Add a workflow** to the workspace +4. **Add a Start node** and an **Agent node** to the canvas +5. **Save the workflow** (Ctrl+S) + - Screenshot: after workflow save +6. **Refresh the page** to verify the workflow persists + - Screenshot: after page refresh showing persisted data +7. **Delete the workflow** +8. **Verify it no longer appears** in the workspace + - Screenshot: after deletion + +## Success Criteria + +- All workspace and workflow CRUD operations work identically to before the refactor +- No regressions in save, load, delete, or snapshot behavior +- The storage abstraction is completely transparent to the user — no UI changes, no new error states +- Existing data created before the refactor (if any) continues to load correctly + +## Screenshot Capture Points + +1. After workspace creation — shows the new workspace in the picker +2. After workflow save — shows the workflow with nodes on canvas +3. After page refresh — confirms data was persisted and reloaded +4. 
After deletion — confirms the workflow is removed from the workspace + +## Additional Verification + +- Check that `.nexus-brain/workspaces/` directory structure matches the expected layout (manifest.json, workflows/*.json, snapshots/) +- Verify that Brain documents (if the Knowledge Brain feature is in use) continue to save and load correctly +- Confirm that the collaboration server (if running) still persists room state via the storage provider diff --git a/docs/tasks/feature-generic-file-storage-b8dbb4a6/plan-feature-generic-file-storage-b8dbb4a6.md b/docs/tasks/feature-generic-file-storage-b8dbb4a6/plan-feature-generic-file-storage-b8dbb4a6.md new file mode 100644 index 0000000..4d08fba --- /dev/null +++ b/docs/tasks/feature-generic-file-storage-b8dbb4a6/plan-feature-generic-file-storage-b8dbb4a6.md @@ -0,0 +1,303 @@ +# feature: Generic File Storage + +## Metadata +adw_id: `b8dbb4a6` +issue_description: `Generic file storage — the file backing should be generic and allow for multiple different file storages. S3, SharePoint, Azure Blob, etc. Make sure there is an easy way to implement and extend the existing file storage.` + +## Description +The Nexus Workflow Studio server-side persistence currently uses direct Node.js `fs` calls for all storage operations — workspaces, workflows, snapshots, brain documents, and collaboration state. There is no abstraction layer, making it impossible to swap the backing store to S3, Azure Blob Storage, SharePoint, or any other provider without rewriting every call site. + +This feature introduces a `StorageProvider` interface that abstracts file read/write/list/delete operations, a local filesystem implementation that preserves current behavior, and a provider registry with configuration-driven selection. All existing server-side storage consumers will be migrated to use the new abstraction. + +## Objective +When complete, all server-side file I/O will flow through a pluggable `StorageProvider` interface. 
The local filesystem provider will be the default. Adding a new storage backend (e.g., S3, Azure Blob) will require only implementing the interface and registering the provider — no changes to workspace, brain, snapshot, or collaboration code. + +## Problem Statement +Every server-side module (`workspace/server.ts`, `brain/server.ts`, `workspace/snapshots.ts`, `collaboration/object-store.ts`) imports `node:fs/promises` directly and builds file paths with `node:path`. This tight coupling means: +- Switching to cloud storage requires rewriting dozens of call sites +- Testing storage behavior requires a real filesystem +- There is no way to add cross-cutting concerns (logging, metrics, encryption) without modifying every consumer + +## Solution Statement +Introduce a layered abstraction: +1. A `StorageProvider` interface with operations: `read`, `write`, `delete`, `list`, `exists`, `stat` +2. A `LocalFilesystemProvider` that wraps current `fs` logic (zero behavior change) +3. A provider factory/registry that selects the active provider based on configuration (`NEXUS_STORAGE_PROVIDER` env var) +4. Migrate all server-side consumers to obtain storage through the factory instead of importing `fs` directly +5. 
Provide a clear pattern for adding new providers (documented interface + registration) + +## Code Patterns to Follow +Reference implementations: +- `src/lib/workspace/config.ts` — env-var-driven config with cached singleton pattern +- `src/lib/collaboration/object-store.ts` — closest existing abstraction (constructor-injected `dataDir`, `load`/`store` methods) +- `src/lib/workspace/server.ts` — helper patterns: `readJsonFile`, `writeJsonFile`, `ensureDir`, atomic writes +- `src/lib/brain/server.ts` — same helper patterns, shows the duplication that the abstraction will eliminate + +## Relevant Files +Use these files to complete the task: + +### Existing Files to Modify + +- **`src/lib/workspace/server.ts`** — Primary workspace/workflow CRUD; currently imports `fs` directly. All `readJsonFile`/`writeJsonFile`/`ensureDir`/`fs.unlink`/`fs.readdir`/`fs.rm`/`fs.access` calls must be replaced with `StorageProvider` methods. +- **`src/lib/workspace/snapshots.ts`** — Snapshot read/write/list; uses `fs.mkdir`, `fs.writeFile`, `fs.rename`, `fs.readFile`, `fs.readdir`. Must migrate to provider. +- **`src/lib/workspace/config.ts`** — Workspace config; will be updated to expose storage provider selection. +- **`src/lib/brain/server.ts`** — Brain document persistence; duplicates `readJsonFile`/`writeJsonFile`/`ensureDir` from workspace. Must migrate to provider. +- **`src/lib/brain/config.ts`** — Brain config; will be updated for storage provider selection. +- **`src/lib/collaboration/object-store.ts`** — Collab state persistence with `load`/`store` + atomic writes. Must migrate to provider. +- **`CLAUDE.md`** — Project instructions; should reference the new storage abstraction. 
+ +### New Files + +- **`src/lib/storage/types.ts`** — `StorageProvider` interface, `StorageMetadata` type, `StorageProviderType` enum +- **`src/lib/storage/local-provider.ts`** — `LocalFilesystemProvider` implementing `StorageProvider` using `node:fs/promises` +- **`src/lib/storage/factory.ts`** — Provider factory: reads config, instantiates and caches the active provider +- **`src/lib/storage/config.ts`** — Storage configuration (env var parsing, defaults) +- **`src/lib/storage/index.ts`** — Public barrel export +- **`src/lib/storage/__tests__/local-provider.test.ts`** — Unit tests for the local filesystem provider +- **`src/lib/storage/__tests__/factory.test.ts`** — Unit tests for the factory/registry +- **`docs/tasks/feature-generic-file-storage-b8dbb4a6/e2e-feature-generic-file-storage-b8dbb4a6.md`** — E2E test specification + +## Implementation Plan + +### Phase 1: Foundation +Build the storage abstraction layer from scratch: +- Define the `StorageProvider` interface with all required operations +- Define supporting types (`StorageMetadata`, config types, provider type enum) +- Implement the `LocalFilesystemProvider` that wraps current `fs` behavior +- Build the factory with env-var-driven provider selection +- Write unit tests for the local provider and factory + +### Phase 2: Core Implementation +Migrate all existing server-side consumers to use the storage provider: +- Refactor `workspace/server.ts` to use the provider instead of direct `fs` calls +- Refactor `workspace/snapshots.ts` similarly +- Refactor `brain/server.ts` similarly +- Refactor `collaboration/object-store.ts` to accept a provider +- Remove duplicated helper functions (`readJsonFile`, `writeJsonFile`, `ensureDir`) that are now handled by the provider + +### Phase 3: Integration +Ensure everything works together and is well documented: +- Update configuration files to support the new `NEXUS_STORAGE_PROVIDER` env var +- Verify all API routes continue working through the abstraction +- Update 
CLAUDE.md with storage architecture notes +- Ensure existing tests pass with zero behavior change + +## Step by Step Tasks +IMPORTANT: Execute every step in order, top to bottom. + +### 1. Define the StorageProvider Interface and Types +- Create `src/lib/storage/types.ts` with: + ```typescript + export interface StorageMetadata { + size: number; + lastModified: string; + } + + export interface StorageProvider { + /** Read a file as a UTF-8 string. Returns null if not found. */ + read(key: string): Promise<string | null>; + + /** Read a file as raw bytes. Returns null if not found. */ + readBytes(key: string): Promise<Uint8Array | null>; + + /** Write a UTF-8 string to a key. Creates parent directories/prefixes as needed. */ + write(key: string, content: string): Promise<void>; + + /** Write raw bytes to a key. Creates parent directories/prefixes as needed. */ + writeBytes(key: string, content: Uint8Array): Promise<void>; + + /** Atomically write content (write to temp, then rename). Providers that don't support atomic ops fall back to regular write. */ + writeAtomic(key: string, content: string | Uint8Array): Promise<void>; + + /** Delete a single key. Returns true if deleted, false if not found. */ + delete(key: string): Promise<boolean>; + + /** Recursively delete a key prefix/directory. Returns true if anything was deleted. */ + deleteTree(key: string): Promise<boolean>; + + /** Check if a key exists. */ + exists(key: string): Promise<boolean>; + + /** Get metadata for a key. Returns null if not found. */ + stat(key: string): Promise<StorageMetadata | null>; + + /** List immediate children under a prefix. Returns relative names. */ + list(prefix: string): Promise<string[]>; + + /** List immediate child directories under a prefix. Returns relative names. */ + listDirectories(prefix: string): Promise<string[]>; + } + + // "local" | (string & {}) keeps the "local" literal in editor autocompletion while still + // allowing arbitrary registered provider names; a plain `"local" | string` union would + // collapse to `string` and lose the literal. + export type StorageProviderType = "local" | (string & {}); + ``` + +### 2. 
Create the Storage Configuration +- Create `src/lib/storage/config.ts`: + - Read `NEXUS_STORAGE_PROVIDER` env var (default: `"local"`) + - Read `NEXUS_STORAGE_ROOT` env var as the base path/bucket (default: derived from `NEXUS_BRAIN_DATA_DIR` or `process.cwd() + "/.nexus-brain"`) + - Use the cached singleton pattern from `workspace/config.ts` + +### 3. Implement LocalFilesystemProvider +- Create `src/lib/storage/local-provider.ts`: + - Constructor takes a `rootDir: string` parameter + - All keys are resolved relative to `rootDir` + - `read(key)` — `fs.readFile(path, "utf8")` with try/catch returning null + - `readBytes(key)` — `fs.readFile(path)` returning `Uint8Array` + - `write(key, content)` — `ensureDir` + `fs.writeFile` + - `writeBytes(key, content)` — `ensureDir` + `fs.writeFile` + - `writeAtomic(key, content)` — write to `path.tmp`, then `fs.rename` + - `delete(key)` — `fs.unlink` with try/catch + - `deleteTree(key)` — `fs.rm({ recursive: true, force: true })` + - `exists(key)` — `fs.access` with try/catch + - `stat(key)` — `fs.stat` returning `{ size, lastModified }` + - `list(prefix)` — `fs.readdir` filtering for files + - `listDirectories(prefix)` — `fs.readdir` filtering for directories + - Include path traversal guard: all resolved paths must remain within `rootDir` + +### 4. Build the Provider Factory +- Create `src/lib/storage/factory.ts`: + - `getStorageProvider(): StorageProvider` — reads config, instantiates and caches the provider + - For `"local"` type, creates `LocalFilesystemProvider` with the configured root + - For unknown types, throws an error with a helpful message listing available providers + - `resetStorageProvider(): void` — clears the cache (for testing) + - Export a `registerStorageProvider(type: string, factory: () => StorageProvider)` for extensibility + +### 5. 
Create Barrel Export +- Create `src/lib/storage/index.ts` exporting: + - `StorageProvider`, `StorageMetadata`, `StorageProviderType` from `types.ts` + - `getStorageProvider`, `resetStorageProvider`, `registerStorageProvider` from `factory.ts` + - `LocalFilesystemProvider` from `local-provider.ts` + +### 6. Write Unit Tests for Storage Layer +- Create `src/lib/storage/__tests__/local-provider.test.ts`: + - Test `read`/`write` round-trip + - Test `readBytes`/`writeBytes` round-trip + - Test `writeAtomic` produces correct file + - Test `delete` returns true for existing, false for missing + - Test `deleteTree` removes directory recursively + - Test `exists` returns true/false correctly + - Test `stat` returns correct metadata + - Test `list` returns file names under prefix + - Test `listDirectories` returns only directories + - Test path traversal prevention (keys with `..`) + - Use `os.tmpdir()` + unique directory for test isolation +- Create `src/lib/storage/__tests__/factory.test.ts`: + - Test default provider is `LocalFilesystemProvider` + - Test caching (same instance returned) + - Test `resetStorageProvider` clears cache + - Test `registerStorageProvider` for custom types + +### 7. 
Migrate workspace/server.ts to StorageProvider +- Import `getStorageProvider` from `@/lib/storage` +- Remove direct `import fs from "node:fs/promises"` and `import path from "node:path"` (keep `path` if needed for ID validation only) +- Remove local `readJsonFile`, `writeJsonFile`, `ensureDir` helpers +- Create a module-level helper `storage()` that calls `getStorageProvider()` +- Convert each function: + - `listWorkspaces()` — use `storage().listDirectories("workspaces")` + `storage().read()` for manifests + - `createWorkspace()` — use `storage().write()` for manifest + - `getWorkspace()` — use `storage().read()` for manifest + - `updateWorkspace()` — use `storage().read()` + `storage().write()` + - `deleteWorkspace()` — use `storage().deleteTree()` + - `createWorkflow()` — use `storage().write()` for workflow + manifest + - `getWorkflow()` — use `storage().read()` + - `saveWorkflow()` — use `storage().write()` + - `updateWorkflowMeta()` — use `storage().read()` + `storage().write()` + - `deleteWorkflow()` — use `storage().delete()` + `storage().write()` +- Key mapping: current `workspaceDir(id)` path becomes a key like `workspaces/{id}`, `manifestPath` becomes `workspaces/{id}/manifest.json`, etc. +- IMPORTANT: The workspace config `dataDir` is currently used as the root. After migration, the storage provider's root replaces this. Ensure `workspace/config.ts` still provides any non-storage config values, but the `dataDir` for file I/O is no longer used directly. + +### 8. Migrate workspace/snapshots.ts to StorageProvider +- Import `getStorageProvider` from `@/lib/storage` +- Remove direct `fs` import +- Convert: + - `writeSnapshot()` — use `storage().writeAtomic()` with key `workspaces/{wsId}/snapshots/{wfId}/{timestamp}.json` + - `listSnapshots()` — use `storage().list()` on the snapshots prefix + - `getSnapshot()` — use `storage().read()` + +### 9. 
Migrate brain/server.ts to StorageProvider +- Import `getStorageProvider` from `@/lib/storage` +- Remove direct `fs` import and local `readJsonFile`/`writeJsonFile`/`ensureDir` duplicates +- Convert all file operations to use the storage provider +- Brain keys: `brain/manifest.json`, `brain/live/{wsId}/{docId}.json`, `brain/versions/{wsId}/{docId}/{versionId}.json` + +### 10. Migrate collaboration/object-store.ts to StorageProvider +- Modify `CollabObjectStore` constructor to accept a `StorageProvider` instead of (or in addition to) a `dataDir` string +- Convert `load()` to use `provider.readBytes()` +- Convert `store()` to use `provider.writeAtomic()` for both state and metadata +- Keys: `collab/rooms/{roomHash}/state.bin`, `collab/rooms/{roomHash}/metadata.json` + +### 11. Update Configuration Files +- Update `src/lib/workspace/config.ts`: + - The `dataDir` can remain for backward compatibility but document that storage operations now go through the provider + - Alternatively, simplify to only expose non-storage config +- Update `src/lib/brain/config.ts` similarly +- Ensure `NEXUS_STORAGE_PROVIDER` and `NEXUS_STORAGE_ROOT` are documented + +### 12. Update Call Sites that Instantiate CollabObjectStore +- Search for all `new CollabObjectStore(...)` calls +- Update them to pass the storage provider (or a scoped sub-provider) + +### 13. Create E2E Test Specification +- Create `docs/tasks/feature-generic-file-storage-b8dbb4a6/e2e-feature-generic-file-storage-b8dbb4a6.md` with: + - **User Story**: As a user, I can create workspaces, save workflows, and perform all storage operations without noticing any change — the generic storage layer is transparent. + - **Test Steps**: + 1. Navigate to the app at `http://localhost:3000` + 2. Create a new workspace via the workspace picker + 3. Add a workflow to the workspace + 4. Add a Start node and an Agent node to the canvas + 5. Save the workflow (Ctrl+S) + 6. Verify the workflow persists by refreshing the page + 7. 
Delete the workflow + 8. Verify it no longer appears in the workspace + 9. Screenshot at each key state + - **Success Criteria**: All workspace and workflow CRUD operations work identically to before the refactor. No regressions in save, load, delete, or snapshot behavior. + - **Screenshot capture points**: After workspace creation, after workflow save, after page refresh showing persisted data, after deletion + +### 14. Run Validation Commands +- Run `bun run typecheck` — must pass with zero errors +- Run `bun run lint` — must pass with zero errors +- Run `bun run build` — must build successfully (this change affects server-side wiring) +- Run any existing tests (`bun run test` if available, or targeted test commands) + +## Testing Strategy + +### Unit Tests +- `local-provider.test.ts`: Full coverage of all `StorageProvider` interface methods against real temp filesystem +- `factory.test.ts`: Provider selection, caching, reset, custom registration +- Existing workspace/brain tests (if any) should continue to pass without modification + +### Edge Cases +- Key with path traversal attempt (`../../etc/passwd`) — must be rejected +- Read/write of empty string content +- Read of non-existent key returns `null` (not throw) +- `writeAtomic` with concurrent writes (temp file cleanup) +- `list` on non-existent prefix returns empty array (not throw) +- `deleteTree` on non-existent path returns `false` +- Very long key names +- Keys with special characters +- Binary content via `readBytes`/`writeBytes` + +## Acceptance Criteria +- A `StorageProvider` interface exists in `src/lib/storage/types.ts` with `read`, `readBytes`, `write`, `writeBytes`, `writeAtomic`, `delete`, `deleteTree`, `exists`, `stat`, `list`, and `listDirectories` methods +- A `LocalFilesystemProvider` implements the interface and preserves all current filesystem behavior +- A factory in `src/lib/storage/factory.ts` provides the active provider based on `NEXUS_STORAGE_PROVIDER` env var +- 
`registerStorageProvider()` allows third-party providers to be added at runtime +- `workspace/server.ts`, `workspace/snapshots.ts`, `brain/server.ts`, and `collaboration/object-store.ts` no longer import `node:fs/promises` directly for storage operations +- All existing functionality works identically (zero behavior change for end users) +- Unit tests cover the local provider and factory +- `bun run typecheck`, `bun run lint`, and `bun run build` all pass + +## Validation Commands +Execute every command to validate the work is complete with zero regressions. + +- `bun run typecheck` — type check +- `bun run lint` — code quality +- `bun run build` — build check +- `bun run test:lib` — library tests (if storage tests are placed under lib) + +## Notes +- The `LocalFilesystemProvider` should be a drop-in replacement for current behavior. No user-visible changes should result from this refactor alone. +- Future provider implementations (S3, Azure Blob, SharePoint) only need to implement the `StorageProvider` interface and register via `registerStorageProvider()`. No core code changes required. +- The `writeAtomic` method exists because both `workspace/snapshots.ts` and `collaboration/object-store.ts` use the temp-file-then-rename pattern for crash safety. Cloud providers can implement this as a regular write if atomic rename isn't supported. +- Browser-side storage (localStorage, File System Access API for exports) is deliberately out of scope — this feature targets server-side persistence only. +- The `collaboration/object-store.ts` migration is the most nuanced because it deals with binary data (`Uint8Array`). The `readBytes`/`writeBytes` methods on the interface handle this. 
diff --git a/docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/e2e-feature-workspace-recent-changes-panel-857b7bc9.md b/docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/e2e-feature-workspace-recent-changes-panel-857b7bc9.md new file mode 100644 index 0000000..d86cb52 --- /dev/null +++ b/docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/e2e-feature-workspace-recent-changes-panel-857b7bc9.md @@ -0,0 +1,78 @@ +# E2E Test Specification: Workspace Recent Changes Panel + +## User Story +Validate that a returning user sees a changes panel on the workspace dashboard showing node-level changes made by other users since their last visit. + +## Preconditions +- The application is running at `http://localhost:3000`. +- No pre-existing workspace data (clean slate or known workspace ID). + +## Test Steps + +### Setup via API + +1. **Create a workspace** via `POST /api/workspaces` with body `{ "name": "E2E Changes Test" }`. Capture `workspace.id`. +2. **Create a workflow** via `POST /api/workspaces/{id}/workflows` with body `{ "name": "My Workflow" }`. Capture `workflow.id`. +3. **Save workflow with initial nodes** via `PUT /api/workspaces/{id}/workflows/{wid}` with body: + ```json + { + "lastModifiedBy": "Alice", + "data": { + "name": "My Workflow", + "nodes": [ + { "id": "n1", "type": "start", "position": { "x": 0, "y": 0 }, "data": { "type": "start", "label": "Start", "name": "Start" } }, + { "id": "n2", "type": "prompt", "position": { "x": 200, "y": 0 }, "data": { "type": "prompt", "label": "Ask Question", "name": "Ask Question", "promptText": "", "detectedVariables": [], "brainDocId": null } } + ], + "edges": [], + "ui": { "sidebarOpen": true, "minimapVisible": false, "viewport": { "x": 0, "y": 0, "zoom": 1 } } + } + } + ``` +4. 
**Wait briefly** (500ms), then **save again** with an added node and `lastModifiedBy: "Bob"`: + ```json + { + "lastModifiedBy": "Bob", + "data": { + "name": "My Workflow", + "nodes": [ + { "id": "n1", "type": "start", "position": { "x": 0, "y": 0 }, "data": { "type": "start", "label": "Start", "name": "Start" } }, + { "id": "n2", "type": "prompt", "position": { "x": 200, "y": 0 }, "data": { "type": "prompt", "label": "Ask Question", "name": "Ask Question", "promptText": "", "detectedVariables": [], "brainDocId": null } }, + { "id": "n3", "type": "script", "position": { "x": 400, "y": 0 }, "data": { "type": "script", "label": "Process Data", "name": "Process Data", "promptText": "", "detectedVariables": [] } } + ], + "edges": [], + "ui": { "sidebarOpen": true, "minimapVisible": false, "viewport": { "x": 0, "y": 0, "zoom": 1 } } + } + } + ``` + +### Browser Test Steps + +5. **Set localStorage** key `nexus:workspace-last-seen:{workspaceId}` to a timestamp **before** both saves (e.g., 1 hour ago). +6. **Navigate** to `/workspace/{workspaceId}`. +7. **Assert** the changes panel slides in from the right side of the viewport. +8. **Assert** the panel header shows a change count and "since {formatted date}". +9. **Assert** the workflow name "My Workflow" appears as a group header in the panel. +10. **Assert** individual change events show correct user names ("Alice", "Bob") and node names ("Start", "Ask Question", "Process Data"). +11. **Assert** colored initial badges are visible (round circles with first letter of user name). +12. **Click "Dismiss"** (the X button) — assert the panel slides out and is no longer visible. +13. **Reload the page** — assert the panel re-appears (last-seen was updated on the prior load, but the saves still happened after the original `since` time set in step 5; however, the new `since` from the markSeen call means only changes after the previous page load would show — depending on timing, panel may or may not appear. 
To guarantee it appears, reset localStorage again before reload). +14. **Screenshot capture** at: panel visible state, after dismiss. + +### No-Changes Scenario + +15. **Set localStorage** `nexus:workspace-last-seen:{workspaceId}` to the **current** time. +16. **Reload** the page. +17. **Assert** no changes panel appears. + +## Success Criteria +- Panel appears with correct change data grouped by workflow. +- Dismiss works — panel slides out and does not re-appear for the rest of the session. +- Colored initial badges use consistent color hashing (same name = same color). +- The `node_added` events for "Start", "Ask Question" (from Alice's save) and "Process Data" (from Bob's save) are all shown. +- No `node_moved` events appear when only position changes occur. +- Panel does not appear when `last-seen` is set to current time. + +## Edge Cases to Verify +- Empty workspace (no workflows) — no panel shown. +- Workflow with no snapshots — no panel shown. +- Very long node names — panel content scrolls. diff --git a/docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/patches/patch-feature-workspace-recent-changes-panel-857b7bc9-1.md b/docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/patches/patch-feature-workspace-recent-changes-panel-857b7bc9-1.md new file mode 100644 index 0000000..fc2ad42 --- /dev/null +++ b/docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/patches/patch-feature-workspace-recent-changes-panel-857b7bc9-1.md @@ -0,0 +1,64 @@ +# Patch: Differentiate Open Workspace and New Workspace actions + +## Metadata +adw_id: `docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/patches/patch-feature-workspace-recent-changes-panel-857b7bc9-1.md` +review_change_request: `The workspace management needs to be improved. We need to have a way to edit and select different workspaces. Right now, there's just a recent history dropdown, but that's confusing. 
"Open" should open a list of the workspaces you currently have, and "New" should create a new one. Right now they both do the same function.` + +## Issue Summary +**Original Plan:** docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/plan-feature-workspace-recent-changes-panel-857b7bc9.md +**Issue:** In `src/components/workspace/landing-page.tsx`, both the "Open Workspace" card button and the "New workspace" button call the same `handleNewWorkspace()` handler, which always creates a new workspace via `POST /api/workspaces`. There is no way to browse and select an existing workspace — the only path to existing workspaces is through the "Recent workspaces" list below, which is not intuitive. +**Solution:** +1. Add a `GET` handler to the `/api/workspaces` route that lists all workspace directories from disk. +2. Add a `listWorkspaces()` function to `server.ts`. +3. Change the "Open Workspace" button to open a dialog/sheet that fetches and displays all existing workspaces for selection. +4. Keep the "New workspace" button as-is (creates a new workspace). + +## Files to Modify + +- **`src/lib/workspace/server.ts`** — Add `listWorkspaces()` function to scan the data directory for workspace manifests. +- **`src/app/api/workspaces/route.ts`** — Add `GET` handler that calls `listWorkspaces()`. +- **`src/components/workspace/landing-page.tsx`** — Change "Open Workspace" button to open a workspace picker dialog instead of creating a new workspace. Add workspace picker dialog with loading state, empty state, and clickable workspace entries. + +## Implementation Steps +IMPORTANT: Execute every step in order, top to bottom. + +### Step 1: Add `listWorkspaces()` to server.ts +- In `src/lib/workspace/server.ts`, add a new exported function `listWorkspaces()` that: + 1. Reads the workspace data directory (`getWorkspaceConfig().dataDir`). + 2. Lists subdirectories using `fs.readdir` with `withFileTypes: true`. + 3. 
For each subdirectory, attempts to read its `manifest.json` via `readJsonFile`. + 4. Returns an array of `WorkspaceRecord` objects (id, name, createdAt, updatedAt) sorted by `updatedAt` descending. + 5. Gracefully skips directories without a valid manifest. + +### Step 2: Add GET handler to `/api/workspaces` route +- In `src/app/api/workspaces/route.ts`, add a `GET` handler: + - Calls `listWorkspaces()` from `server.ts`. + - Returns `{ workspaces: WorkspaceRecord[] }` as JSON. + - Wraps in try/catch with 500 error handling, matching existing POST handler pattern. + +### Step 3: Update landing page with workspace picker +- In `src/components/workspace/landing-page.tsx`: + - Add `showPicker` state (boolean, default false). + - Change the "Open Workspace" button's `onClick` to set `showPicker(true)`. + - Add an inline workspace picker section (rendered conditionally when `showPicker` is true) that: + 1. Fetches `GET /api/workspaces` on open via a `useEffect`. + 2. Shows a loading spinner while fetching. + 3. If no workspaces exist, shows "No workspaces yet" empty state with a prompt to create one. + 4. Lists workspaces as clickable rows (name, last updated time) — clicking navigates to `/workspace/{id}`. + 5. Has a "Cancel" or close button to hide the picker. + - Use existing theme tokens (`BG_SURFACE`, `BORDER_DEFAULT`, `TEXT_PRIMARY`, `TEXT_MUTED`) and patterns from `recent-workspaces.tsx` for consistent styling. + - Keep the "New workspace" button unchanged — it continues to call `handleNewWorkspace()`. + +## Validation +Execute every command to validate the patch is complete with zero regressions. + +```bash +bun run typecheck +bun run lint +bun run build +``` + +## Patch Scope +**Lines of code to change:** ~80-100 +**Risk level:** low +**Testing required:** Manual verification that "Open Workspace" shows a picker of existing workspaces, "New workspace" creates a new workspace, and existing recent workspaces list still works. 
diff --git a/docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/plan-feature-workspace-recent-changes-panel-857b7bc9.md b/docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/plan-feature-workspace-recent-changes-panel-857b7bc9.md new file mode 100644 index 0000000..ad54b02 --- /dev/null +++ b/docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/plan-feature-workspace-recent-changes-panel-857b7bc9.md @@ -0,0 +1,240 @@ +# feature: Workspace Recent Changes Panel + +## Metadata +adw_id: `857b7bc9` +issue_description: `Workspace Recent Changes — snapshot-per-save system, server-side diff computation, per-browser last-seen tracking, and a dashboard changes panel that surfaces node-level workflow changes since last visit.` + +## Description +When a team member returns to a workspace after time away, they have no visibility into what changed while they were gone. This feature adds a lightweight audit trail via periodic server snapshots (triggered on every PUT workflow save) and a dashboard-side diff panel that surfaces workflow-level summaries (who edited, when) and expandable node-level changes (added, removed, renamed nodes) since the user's last visit. + +## Objective +Implement the full snapshot + diff + changes panel pipeline so that returning users see a "what changed" panel on the workspace dashboard, showing per-workflow node-level events (added, deleted, renamed) attributed to the user who saved them. + +## Problem Statement +Returning workspace users have zero visibility into changes made by teammates while they were away. They must manually open each workflow and inspect it to understand what changed, which is slow and error-prone. + +## Solution Statement +1. **Snapshot system**: On every `PUT /api/workspaces/[id]/workflows/[wid]` save, write an append-only timestamped snapshot of the workflow JSON to disk. +2. 
**Diff computation API**: A new `GET /api/workspaces/[id]/changes?since=...` endpoint walks snapshots chronologically, diffs adjacent pairs at the node level, and returns structured change events. +3. **Last-seen tracking**: Per-browser localStorage key tracks when the user last opened the dashboard; used as the `since` baseline. +4. **Changes panel UI**: A slide-in panel on the dashboard shows grouped node-level changes with user attribution and colored initial badges. + +## Code Patterns to Follow +Reference implementations: +- **Server file operations**: `src/lib/workspace/server.ts` — `writeJsonFile`, `readJsonFile`, `ensureDir`, atomic file writes, manifest read/update pattern. +- **API route pattern**: `src/app/api/workspaces/[id]/workflows/[wid]/route.ts` — Zod validation, try/catch, `NextResponse.json`. +- **Dashboard components**: `src/components/workspace/dashboard.tsx`, `workflow-card.tsx` — theme tokens, responsive grid, component composition. +- **Color hashing**: `src/lib/collaboration/awareness-names.ts` — `getColorForClientId()` for a deterministic color derived from a client ID. +- **Hooks pattern**: `src/hooks/use-workspace.ts` — fetch + state + loading/error + refetch. +- **Zod schemas**: `src/lib/workspace/schemas.ts` — import from `"zod/v4"`. +- **Theme tokens**: `src/lib/theme.ts` — `BG_APP`, `BG_SURFACE`, `TEXT_PRIMARY`, `TEXT_MUTED`, `BORDER_DEFAULT`. +- **Workspace types**: `src/lib/workspace/types.ts` — interface-based type definitions. +- **Workspace config**: `src/lib/workspace/config.ts` — `getWorkspaceConfig().dataDir` for data directory path. + +## Relevant Files +Use these files to complete the task: + +### Existing Files to Modify +- **`src/lib/workspace/server.ts`** — Add `writeSnapshot()` call inside `saveWorkflow()`, plus new functions: `listSnapshots()`, `getSnapshot()`, `computeChanges()`. +- **`src/lib/workspace/types.ts`** — Add snapshot and change event type definitions. 
+- **`src/app/api/workspaces/[id]/workflows/[wid]/route.ts`** — Modify PUT handler to call snapshot writer after save. +- **`src/components/workspace/dashboard.tsx`** — Integrate changes fetch, last-seen read/write, and render the changes panel. +- **`src/hooks/use-workspace.ts`** — Optionally extend or keep separate; the changes fetch may be a dedicated hook. + +### Existing Files to Read (Reference Only) +- **`CLAUDE.md`** — Project conventions, import rules (`@/*` alias, `zod/v4`), dark theme, guardrails. +- **`src/lib/workspace/config.ts`** — `getWorkspaceConfig().dataDir` for building snapshot paths. +- **`src/lib/workspace/schemas.ts`** — Zod schema pattern to follow for new schemas. +- **`src/lib/collaboration/awareness-names.ts`** — `getColorForClientId()` and `HUE_SLOTS` for badge colors. Need a name-based variant since changes panel uses display names, not client IDs. +- **`src/lib/theme.ts`** — Theme tokens for consistent styling. +- **`src/components/workspace/workflow-card.tsx`** — Card styling patterns. +- **`src/components/workspace/workspace-header.tsx`** — Header layout pattern. + +### New Files +- **`src/app/api/workspaces/[id]/workflows/[wid]/snapshots/route.ts`** — `GET` handler returning snapshot metadata list (FR-4). +- **`src/app/api/workspaces/[id]/workflows/[wid]/snapshots/[timestamp]/route.ts`** — `GET` handler returning full snapshot JSON (FR-5). +- **`src/app/api/workspaces/[id]/changes/route.ts`** — `GET` handler computing and returning diff events (FR-9). +- **`src/components/workspace/changes-panel.tsx`** — The slide-in changes panel UI component (FR-14–FR-20). +- **`src/hooks/use-workspace-changes.ts`** — Hook for fetching changes and managing last-seen state (FR-6–FR-8, FR-21–FR-22). +- **`src/lib/workspace/snapshots.ts`** — Server-side snapshot read/write/diff logic (FR-1–FR-3, FR-10–FR-13). 
+- **`docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/e2e-feature-workspace-recent-changes-panel-857b7bc9.md`** — E2E test specification. + +## Implementation Plan + +### Phase 1: Foundation +- Define TypeScript types for snapshots and change events. +- Implement snapshot file read/write utilities in a new `src/lib/workspace/snapshots.ts` module. +- Add snapshot writing to the existing `saveWorkflow()` function in `server.ts`. + +### Phase 2: Core Implementation +- Build the diff computation engine that walks adjacent snapshot pairs and detects node_added, node_deleted, node_renamed events. +- Create the three new API routes: snapshot list, snapshot detail, and changes endpoint. +- Create the `useWorkspaceChanges` hook with last-seen localStorage management. + +### Phase 3: Integration +- Build the changes panel UI component with slide-in animation, grouped layout, dismiss behavior, and colored initial badges. +- Integrate the changes panel into the dashboard component following the load sequence defined in FR-21. +- Wire up the last-seen timestamp write to occur after both manifest and changes have been fetched and rendered. + +## Step by Step Tasks +IMPORTANT: Execute every step in order, top to bottom. + +### 1. Define Snapshot and Change Event Types +- In `src/lib/workspace/types.ts`, add: + - `SnapshotMeta`: `{ timestamp: string; savedBy: string }` + - `SnapshotFile`: `{ timestamp: string; workflowId: string; workspaceId: string; savedBy: string; data: WorkflowJSON }` + - `ChangeEventType`: `"node_added" | "node_deleted" | "node_renamed"` + - `ChangeEvent`: `{ type: ChangeEventType; nodeName: string; from?: string; to?: string; by: string; at: string }` + - `WorkflowChanges`: `{ workflowId: string; workflowName: string; changeCount: number; events: ChangeEvent[] }` + - `ChangesResponse`: `{ changes: WorkflowChanges[] }` + +### 2. 
Implement Snapshot Read/Write Utilities +- Create `src/lib/workspace/snapshots.ts` with: + - `snapshotsDir(workspaceId, workflowId)` — returns `{dataDir}/{workspaceId}/snapshots/{workflowId}/` + - `writeSnapshot(workspaceId, workflowId, data: WorkflowJSON, savedBy: string)` — writes `{ timestamp, workflowId, workspaceId, savedBy, data }` to `{snapshotsDir}/{urlSafeTimestamp}.json`. Use atomic write: write to `.tmp` file then `fs.rename()`. + - `listSnapshots(workspaceId, workflowId)` — reads directory, parses filenames back to timestamps, returns `SnapshotMeta[]` sorted chronologically. + - `getSnapshot(workspaceId, workflowId, timestamp)` — reads and returns the full `SnapshotFile`. + - URL-safe encoding: replace colons with dashes in ISO timestamp for filename safety (e.g., `2026-04-10T12-30-00.000Z.json`). + +### 3. Hook Snapshot Writing into saveWorkflow +- In `src/lib/workspace/server.ts`, import `writeSnapshot` from `./snapshots`. +- Inside the `saveWorkflow()` function, after writing the workflow JSON and manifest, call `await writeSnapshot(workspaceId, workflowId, data, lastModifiedBy)`. + +### 4. Implement Diff Computation Engine +- In `src/lib/workspace/snapshots.ts`, add: + - `computeChanges(workspaceId, since: string)` that: + 1. Reads the workspace manifest to get all workflow IDs and names. + 2. For each workflow, lists snapshots and filters to those after `since`. + 3. Finds the snapshot immediately before `since` (or treats empty node set as baseline if none exists). + 4. Walks adjacent snapshot pairs chronologically. + 5. For each pair, extracts node sets (by `id`), computes: + - `node_added`: node ID in newer but not older. + - `node_deleted`: node ID in older but not newer. + - `node_renamed`: node ID in both but `data.label` (or node name field) changed. + 6. Each event gets `by` from the later snapshot's `savedBy` and `at` from its timestamp. + 7. Excludes `node_moved` (position-only changes). + 8. 
Skips workflows with no snapshots after `since` (FR-11). + 9. Returns `ChangesResponse` with `changeCount` as total events per workflow. + +### 5. Create Snapshot API Routes +- Create `src/app/api/workspaces/[id]/workflows/[wid]/snapshots/route.ts`: + - `GET` handler calls `listSnapshots(id, wid)` and returns `SnapshotMeta[]`. + - Set `export const dynamic = "force-dynamic"`. +- Create `src/app/api/workspaces/[id]/workflows/[wid]/snapshots/[timestamp]/route.ts`: + - `GET` handler calls `getSnapshot(id, wid, timestamp)`, returns full snapshot or 404. + - Decode the URL-safe timestamp from the route param. + - Set `export const dynamic = "force-dynamic"`. + +### 6. Create Changes API Route +- Create `src/app/api/workspaces/[id]/changes/route.ts`: + - `GET` handler reads `since` query parameter. + - Validates `since` is a valid ISO timestamp; returns 400 if missing/invalid. + - Calls `computeChanges(id, since)` and returns the result. + - Set `export const dynamic = "force-dynamic"`. + +### 7. Create useWorkspaceChanges Hook +- Create `src/hooks/use-workspace-changes.ts`: + - Accepts `workspaceId: string` and `isReady: boolean` (gates fetch until manifest is loaded). + - On mount (when `isReady` is true): + 1. Read `nexus:workspace-last-seen:{workspaceId}` from localStorage → `since`. If absent, default to 24 hours ago. + 2. Fetch `GET /api/workspaces/{workspaceId}/changes?since={since}`. + 3. Store the result in state. + 4. Return `{ changes, isLoading, since, markSeen }`. + - `markSeen()` writes current UTC timestamp to `nexus:workspace-last-seen:{workspaceId}`. + - The hook does NOT call `markSeen()` automatically — the dashboard calls it after rendering. + +### 8. Build the Changes Panel Component +- Create `src/components/workspace/changes-panel.tsx`: + - Props: `changes: WorkflowChanges[]`, `since: string`, `onDismiss: () => void`. + - Panel slides in from the right using a CSS `translate-x` transition (not a modal, does not block the workflow grid). 
+ - Header: "N changes since {formatted date}" with a "Dismiss" button. + - Body: Grouped by workflow. Each group has a workflow name header. Under each group, list individual change events. + - Each event line: colored initial badge (first letter of `by` name, using a name-based hash into the same `HUE_SLOTS` array from `awareness-names.ts`), bold user name, action text ("added Send Notification", "renamed Script 1 -> Validate Input", "deleted Old Transform"), node name. + - Panel is scrollable if content exceeds viewport height. + - Create a `getColorForName(name: string)` utility (hash name string to a number, mod by HUE_SLOTS length) — co-locate in the component or in `awareness-names.ts` alongside the existing `getColorForClientId`. + +### 9. Integrate Changes Panel into Dashboard +- In `src/components/workspace/dashboard.tsx`: + - Import and use `useWorkspaceChanges(workspaceId, !isLoading && !!workspace)`. + - Add `dismissed` state (boolean, default false). + - After the workspace manifest loads and changes are fetched: + - If changes are non-empty and not dismissed, render `` alongside the workflow grid (not blocking it). + - Call `markSeen()` once both workspace data and changes response are available and rendered (FR-6, FR-21 step 6). Use a `useEffect` that depends on workspace and changes being loaded. + - The panel should not appear during loading state. + - When dismissed, set `dismissed = true` — panel does not re-appear for this session (page load). + +### 10. Create E2E Test Specification +- Create `docs/tasks/feature-workspace-recent-changes-panel-857b7bc9/e2e-feature-workspace-recent-changes-panel-857b7bc9.md` with: + - **User Story**: Validate that a returning user sees a changes panel on the workspace dashboard showing node-level changes made by other users since their last visit. + - **Test Steps** (using playwright-cli): + 1. Create a workspace via API `POST /api/workspaces`. + 2. 
Create a workflow via API `POST /api/workspaces/{id}/workflows`. + 3. Save the workflow with some nodes via `PUT /api/workspaces/{id}/workflows/{wid}` with `lastModifiedBy: "Alice"`. + 4. Wait briefly, then save again with an added node and `lastModifiedBy: "Bob"`. + 5. Set localStorage `nexus:workspace-last-seen:{workspaceId}` to a timestamp before both saves. + 6. Navigate to `/workspace/{id}`. + 7. Assert the changes panel slides in from the right. + 8. Assert the panel header shows "N changes since {date}". + 9. Assert the workflow name appears as a group header. + 10. Assert individual change events show correct user names and node names. + 11. Assert colored initial badges are visible. + 12. Click "Dismiss" — assert panel slides out and is no longer visible. + 13. Re-set localStorage `nexus:workspace-last-seen:{workspaceId}` to a timestamp before both saves, then reload the page — assert the panel re-appears (dismissal is session-only; the reset is required because last-seen was advanced on the prior load per AC-4, so a plain reload would show no changes). + 14. Screenshot capture at: panel visible state, after dismiss. + - **Success Criteria**: Panel appears with correct change data, dismiss works, colors match awareness system hashing. + - **No-changes scenario**: Set last-seen to current time, reload — assert no panel appears. + +### 11. Run Validation Commands +- `bun run typecheck` — ensure zero type errors. +- `bun run lint` — ensure zero lint errors. +- `bun run build` — ensure successful production build. + +## Testing Strategy + +### Unit Tests +- `src/lib/workspace/__tests__/snapshots.test.ts`: + - Test `writeSnapshot` creates correct file with correct structure. + - Test `listSnapshots` returns sorted metadata. + - Test `getSnapshot` returns full data. + - Test `computeChanges` with various scenarios: no snapshots, single snapshot, multiple snapshots with adds/deletes/renames. + - Test node identity by `id` — position-only changes produce no events. + - Test `since` filtering — only snapshots after `since` are considered. + - Test baseline snapshot selection (immediately before `since`). 
+ +### Edge Cases +- Workflow with no snapshots after `since` — excluded from response. +- No prior snapshot before `since` — baseline is empty node set (all nodes in first snapshot after `since` are `node_added`). +- Same node added then deleted across multiple snapshots — both events recorded (no deduplication per FR-12). +- First-time visitor (no localStorage key) — `since` defaults to 24 hours ago. +- Empty workspace (no workflows) — changes response is `{ changes: [] }`, panel not shown. +- Very long node names or many changes — panel must scroll. +- Concurrent saves — snapshot filenames are timestamped to millisecond; extremely unlikely collision. +- URL-safe timestamp encoding/decoding round-trips correctly. + +## Acceptance Criteria +- [ ] AC-1: Opening a workspace after another user has saved changes shows the changes panel with their display name and the affected node names. +- [ ] AC-2: Opening a workspace with no changes since last visit shows no changes panel. +- [ ] AC-3: Dismissing the changes panel hides it for the rest of the session; it re-appears on the next page load if changes still exist. +- [ ] AC-4: The last-seen timestamp updates after each dashboard load, so subsequent visits show only newer changes. +- [ ] AC-5: Node additions, deletions, and renames are all correctly detected and attributed. +- [ ] AC-6: `node_moved`-only saves do not produce change events in the panel. +- [ ] AC-7: `GET /api/workspaces/[id]/changes?since=...` returns a correctly structured response matching the schema in FR-9. +- [ ] AC-8: Each change event in the panel shows a colored initial badge using the same color hashing as the awareness system. +- [ ] AC-9: `bun run typecheck` and `bun run build` pass with no new errors. + +## Validation Commands +Execute every command to validate the work is complete with zero regressions. 
+ +```bash +bun run typecheck +bun run lint +bun run build +``` + +## Notes +- The snapshot path structure is `{dataDir}/{workspaceId}/snapshots/{workflowId}/{timestamp}.json` — nested under the workspace data directory alongside the existing `workflows/` directory. +- The `savedBy` field comes from the PUT request body's `lastModifiedBy` field, which is populated from `nexus:collab-name` localStorage on the client. +- For the name-based color hash, use a simple string hash (e.g., sum of char codes) modulo 8 into the same `HUE_SLOTS` array. This gives visual consistency: the same display name always gets the same color badge, matching what they'd see in the awareness/collaboration UI. +- Atomic snapshot writes (write to `.tmp` then rename) prevent partial reads during concurrent diff computation. +- The changes panel does not block the workflow grid — it is rendered alongside it (e.g., as an absolutely positioned or flex-adjacent panel on the right). +- Retention/pruning of old snapshots is explicitly out of scope for this feature. diff --git a/scripts/collab-server.ts b/scripts/collab-server.ts index a1c3546..17e61cd 100644 --- a/scripts/collab-server.ts +++ b/scripts/collab-server.ts @@ -2,6 +2,7 @@ import path from "node:path"; import { Server } from "@hocuspocus/server"; import * as Y from "yjs"; import { CollabObjectStore } from "../src/lib/collaboration/object-store"; +import { LocalFilesystemProvider } from "../src/lib/storage/local-provider"; function readPort(): number { const raw = process.env.NEXUS_COLLAB_SERVER_PORT ?? "1234"; @@ -24,7 +25,8 @@ function readDebounceMs(): number { async function main(): Promise { const port = readPort(); const dataDir = process.env.NEXUS_COLLAB_DATA_DIR ?? 
path.join(process.cwd(), ".nexus-collab"); - const objectStore = new CollabObjectStore(dataDir); + const provider = new LocalFilesystemProvider(dataDir); + const objectStore = new CollabObjectStore(provider); const server = new Server({ port, diff --git a/src/app/api/workspaces/[id]/changes/route.ts b/src/app/api/workspaces/[id]/changes/route.ts new file mode 100644 index 0000000..fa2a834 --- /dev/null +++ b/src/app/api/workspaces/[id]/changes/route.ts @@ -0,0 +1,30 @@ +import { NextResponse } from "next/server"; +import { computeChanges } from "@/lib/workspace/snapshots"; + +export const dynamic = "force-dynamic"; + +type RouteParams = { params: Promise<{ id: string }> }; + +export async function GET(request: Request, { params }: RouteParams) { + try { + const { id } = await params; + const url = new URL(request.url); + const since = url.searchParams.get("since"); + + if (!since) { + return NextResponse.json({ error: "Missing required 'since' query parameter" }, { status: 400 }); + } + + // Validate ISO timestamp + const parsed = Date.parse(since); + if (isNaN(parsed)) { + return NextResponse.json({ error: "Invalid 'since' timestamp — must be ISO 8601" }, { status: 400 }); + } + + const result = await computeChanges(id, since); + return NextResponse.json(result); + } catch (error) { + const message = error instanceof Error ? 
error.message : "Failed to compute changes"; + return NextResponse.json({ error: message }, { status: 500 }); + } +} diff --git a/src/app/api/workspaces/[id]/route.ts b/src/app/api/workspaces/[id]/route.ts index 4049dc3..1be228c 100644 --- a/src/app/api/workspaces/[id]/route.ts +++ b/src/app/api/workspaces/[id]/route.ts @@ -1,6 +1,6 @@ import { NextResponse } from "next/server"; import { UpdateWorkspaceSchema } from "@/lib/workspace/schemas"; -import { getWorkspace, updateWorkspace } from "@/lib/workspace/server"; +import { deleteWorkspace, getWorkspace, updateWorkspace } from "@/lib/workspace/server"; export const dynamic = "force-dynamic"; @@ -45,3 +45,20 @@ export async function PATCH( return NextResponse.json({ error: message }, { status: 500 }); } } + +export async function DELETE( + _request: Request, + { params }: { params: Promise<{ id: string }> }, +) { + try { + const { id } = await params; + const deleted = await deleteWorkspace(id); + if (!deleted) { + return NextResponse.json({ error: "Workspace not found" }, { status: 404 }); + } + return new NextResponse(null, { status: 204 }); + } catch (error) { + const message = error instanceof Error ? 
error.message : "Failed to delete workspace"; + return NextResponse.json({ error: message }, { status: 500 }); + } +} diff --git a/src/app/api/workspaces/[id]/workflows/[wid]/snapshots/[timestamp]/route.ts b/src/app/api/workspaces/[id]/workflows/[wid]/snapshots/[timestamp]/route.ts new file mode 100644 index 0000000..6da3734 --- /dev/null +++ b/src/app/api/workspaces/[id]/workflows/[wid]/snapshots/[timestamp]/route.ts @@ -0,0 +1,29 @@ +import { NextResponse } from "next/server"; +import { getSnapshot } from "@/lib/workspace/snapshots"; + +export const dynamic = "force-dynamic"; + +type RouteParams = { params: Promise<{ id: string; wid: string; timestamp: string }> }; + +export async function GET(_request: Request, { params }: RouteParams) { + try { + const { id, wid, timestamp } = await params; + // Decode URL-safe timestamp back to ISO + const tIndex = timestamp.indexOf("T"); + let isoTimestamp = timestamp; + if (tIndex >= 0) { + const datePart = timestamp.slice(0, tIndex); + const timePart = timestamp.slice(tIndex).replace(/-/g, ":"); + isoTimestamp = datePart + timePart; + } + + const snapshot = await getSnapshot(id, wid, isoTimestamp); + if (!snapshot) { + return NextResponse.json({ error: "Snapshot not found" }, { status: 404 }); + } + return NextResponse.json(snapshot); + } catch (error) { + const message = error instanceof Error ? 
error.message : "Failed to read snapshot"; + return NextResponse.json({ error: message }, { status: 500 }); + } +} diff --git a/src/app/api/workspaces/[id]/workflows/[wid]/snapshots/route.ts b/src/app/api/workspaces/[id]/workflows/[wid]/snapshots/route.ts new file mode 100644 index 0000000..58641b8 --- /dev/null +++ b/src/app/api/workspaces/[id]/workflows/[wid]/snapshots/route.ts @@ -0,0 +1,17 @@ +import { NextResponse } from "next/server"; +import { listSnapshots } from "@/lib/workspace/snapshots"; + +export const dynamic = "force-dynamic"; + +type RouteParams = { params: Promise<{ id: string; wid: string }> }; + +export async function GET(_request: Request, { params }: RouteParams) { + try { + const { id, wid } = await params; + const metas = await listSnapshots(id, wid); + return NextResponse.json(metas); + } catch (error) { + const message = error instanceof Error ? error.message : "Failed to list snapshots"; + return NextResponse.json({ error: message }, { status: 500 }); + } +} diff --git a/src/app/api/workspaces/route.ts b/src/app/api/workspaces/route.ts index e2fcce0..620d62f 100644 --- a/src/app/api/workspaces/route.ts +++ b/src/app/api/workspaces/route.ts @@ -1,9 +1,19 @@ import { NextResponse } from "next/server"; import { CreateWorkspaceSchema } from "@/lib/workspace/schemas"; -import { createWorkspace } from "@/lib/workspace/server"; +import { createWorkspace, listWorkspaces } from "@/lib/workspace/server"; export const dynamic = "force-dynamic"; +export async function GET() { + try { + const workspaces = await listWorkspaces(); + return NextResponse.json({ workspaces }); + } catch (error) { + const message = error instanceof Error ? 
error.message : "Failed to list workspaces"; + return NextResponse.json({ error: message }, { status: 500 }); + } +} + export async function POST(request: Request) { try { const parsed = CreateWorkspaceSchema.safeParse(await request.json().catch(() => ({}))); diff --git a/src/components/workspace/changes-panel.tsx b/src/components/workspace/changes-panel.tsx new file mode 100644 index 0000000..c3a6b86 --- /dev/null +++ b/src/components/workspace/changes-panel.tsx @@ -0,0 +1,129 @@ +"use client"; + +import { X, Plus, Trash2, PenLine } from "lucide-react"; +import { BG_SURFACE, BORDER_DEFAULT, TEXT_PRIMARY, TEXT_MUTED } from "@/lib/theme"; +import type { WorkflowChanges, ChangeEvent } from "@/lib/workspace/types"; + +// Same 8 hue slots used in awareness-names.ts +const HUE_SLOTS = [ + { color: "#7c3aed", colorLight: "#ede9fe" }, // violet + { color: "#0284c7", colorLight: "#e0f2fe" }, // sky + { color: "#d97706", colorLight: "#fef3c7" }, // amber + { color: "#059669", colorLight: "#d1fae5" }, // emerald + { color: "#e11d48", colorLight: "#ffe4e6" }, // rose + { color: "#4f46e5", colorLight: "#e0e7ff" }, // indigo + { color: "#ea580c", colorLight: "#ffedd5" }, // orange + { color: "#0d9488", colorLight: "#ccfbf1" }, // teal +]; + +function getColorForName(name: string): { color: string; colorLight: string } { + let hash = 0; + for (let i = 0; i < name.length; i++) { + hash += name.charCodeAt(i); + } + return HUE_SLOTS[hash % HUE_SLOTS.length]; +} + +function formatSinceDate(iso: string): string { + try { + const date = new Date(iso); + return date.toLocaleDateString(undefined, { + month: "short", + day: "numeric", + hour: "2-digit", + minute: "2-digit", + }); + } catch { + return iso; + } +} + +function EventIcon({ type }: { type: ChangeEvent["type"] }) { + switch (type) { + case "node_added": + return ; + case "node_deleted": + return ; + case "node_renamed": + return ; + } +} + +function eventDescription(event: ChangeEvent): string { + switch (event.type) { + case 
"node_added": + return `added ${event.nodeName}`; + case "node_deleted": + return `deleted ${event.nodeName}`; + case "node_renamed": + return `renamed ${event.from} → ${event.to}`; + } +} + +interface ChangesPanelProps { + changes: WorkflowChanges[]; + since: string; + onDismiss: () => void; +} + +export function ChangesPanel({ changes, since, onDismiss }: ChangesPanelProps) { + const totalChanges = changes.reduce((sum, wf) => sum + wf.changeCount, 0); + + return ( +
+ {/* Header */} +
+
+

+ {totalChanges} change{totalChanges !== 1 ? "s" : ""} +

+

since {formatSinceDate(since)}

+
+ +
+ + {/* Body */} +
+ {changes.map((wf) => ( +
+

+ {wf.workflowName} +

+
+ {wf.events.map((event, i) => { + const { color } = getColorForName(event.by); + const initial = event.by.charAt(0).toUpperCase(); + return ( +
+ {/* Colored initial badge */} +
+ {initial} +
+
+ + + {event.by}{" "} + {eventDescription(event)} + +
+
+ ); + })} +
+
+ ))} +
+
+ ); +} diff --git a/src/components/workspace/dashboard.tsx b/src/components/workspace/dashboard.tsx index 3191771..e0ae945 100644 --- a/src/components/workspace/dashboard.tsx +++ b/src/components/workspace/dashboard.tsx @@ -1,14 +1,16 @@ "use client"; -import { useEffect } from "react"; +import { useEffect, useState } from "react"; import { useRouter } from "next/navigation"; import { Plus, Loader2 } from "lucide-react"; import { useWorkspace } from "@/hooks/use-workspace"; +import { useWorkspaceChanges } from "@/hooks/use-workspace-changes"; import { addRecentWorkspace } from "@/lib/workspace/local-history"; import { BG_APP, TEXT_PRIMARY, TEXT_MUTED, BORDER_DEFAULT } from "@/lib/theme"; import { WorkspaceHeader } from "./workspace-header"; import { WorkflowCard } from "./workflow-card"; import { EmptyState } from "./empty-state"; +import { ChangesPanel } from "./changes-panel"; interface WorkspaceDashboardProps { workspaceId: string; @@ -17,6 +19,18 @@ interface WorkspaceDashboardProps { export function WorkspaceDashboard({ workspaceId }: WorkspaceDashboardProps) { const router = useRouter(); const { workspace, workflows, isLoading, error, refetch } = useWorkspace(workspaceId); + const { changes, isLoading: changesLoading, since, markSeen } = useWorkspaceChanges( + workspaceId, + !isLoading && !!workspace, + ); + const [dismissed, setDismissed] = useState(false); + + // Mark seen once both workspace and changes are loaded + useEffect(() => { + if (workspace && !changesLoading) { + markSeen(); + } + }, [workspace, changesLoading, markSeen]); useEffect(() => { if (workspace) { @@ -61,6 +75,8 @@ export function WorkspaceDashboard({ workspaceId }: WorkspaceDashboardProps) { ); } + const showChangesPanel = !dismissed && !changesLoading && changes.length > 0; + return (
-
+ {showChangesPanel && ( + setDismissed(true)} + /> + )} + +
{workflows.length === 0 ? ( ) : ( diff --git a/src/components/workspace/landing-page.tsx b/src/components/workspace/landing-page.tsx index d21578b..140a610 100644 --- a/src/components/workspace/landing-page.tsx +++ b/src/components/workspace/landing-page.tsx @@ -1,15 +1,50 @@ "use client"; -import { useState } from "react"; +import { useState, useEffect, useCallback } from "react"; import { useRouter } from "next/navigation"; -import { Pencil, Users, Plus, Loader2 } from "lucide-react"; +import { Pencil, Users, Plus, Loader2, X, Clock, FolderOpen, Trash2 } from "lucide-react"; import { Button } from "@/components/ui/button"; +import { ConfirmDialog } from "@/components/ui/confirm-dialog"; import { BG_APP, BG_SURFACE, BORDER_DEFAULT, TEXT_PRIMARY, TEXT_MUTED } from "@/lib/theme"; +import { removeRecentWorkspace } from "@/lib/workspace/local-history"; import { RecentWorkspaces } from "./recent-workspaces"; +import { toast } from "sonner"; + +interface WorkspaceEntry { + id: string; + name: string; + createdAt: string; + updatedAt: string; +} export function LandingPage() { const router = useRouter(); const [creating, setCreating] = useState(false); + const [showPicker, setShowPicker] = useState(false); + const [pickerLoading, setPickerLoading] = useState(false); + const [pickerWorkspaces, setPickerWorkspaces] = useState([]); + const [deleteTarget, setDeleteTarget] = useState(null); + const [deletingWorkspaceId, setDeletingWorkspaceId] = useState(null); + const [recentRefreshKey, setRecentRefreshKey] = useState(0); + + const fetchWorkspaces = useCallback(async () => { + setPickerLoading(true); + try { + const res = await fetch("/api/workspaces"); + if (res.ok) { + const { workspaces } = await res.json(); + setPickerWorkspaces(workspaces); + } + } finally { + setPickerLoading(false); + } + }, []); + + useEffect(() => { + if (showPicker) { + fetchWorkspaces(); + } + }, [showPicker, fetchWorkspaces]); const handleNewWorkspace = async () => { setCreating(true); @@ -27,6 
+62,30 @@ export function LandingPage() { } }; + const handleDeleteWorkspace = async () => { + if (!deleteTarget || deletingWorkspaceId) return; + + const workspaceToDelete = deleteTarget; + setDeletingWorkspaceId(workspaceToDelete.id); + try { + const res = await fetch(`/api/workspaces/${workspaceToDelete.id}`, { + method: "DELETE", + }); + if (!res.ok) throw new Error("Failed to delete workspace"); + removeRecentWorkspace(workspaceToDelete.id); + setPickerWorkspaces((workspaces) => + workspaces.filter((workspace) => workspace.id !== workspaceToDelete.id), + ); + setRecentRefreshKey((key) => key + 1); + setDeleteTarget(null); + toast.success("Workspace deleted"); + } catch { + toast.error("Failed to delete workspace"); + } finally { + setDeletingWorkspaceId(null); + } + }; + return (
@@ -48,12 +107,12 @@ export function LandingPage() {
@@ -74,8 +133,93 @@ export function LandingPage() {
- + {showPicker && ( +
+
+

+ + Select a workspace +

+ +
+ + {pickerLoading ? ( +
+ +
+ ) : pickerWorkspaces.length === 0 ? ( +

+ No workspaces yet. Create one to get started. +

+ ) : ( +
+ {pickerWorkspaces.map((ws) => ( +
+ + +
+ ))} +
+ )} +
+ )} + +
+ + { + if (!open) setDeleteTarget(null); + }} + tone="danger" + title="Delete this workspace?" + description={ + deleteTarget ? ( + <> + This will permanently delete {deleteTarget.name} and + all of its workflows. + + ) : undefined + } + confirmLabel={deletingWorkspaceId ? "Deleting..." : "Delete workspace"} + onConfirm={() => { + void handleDeleteWorkspace(); + }} + /> ); } diff --git a/src/components/workspace/recent-workspaces.tsx b/src/components/workspace/recent-workspaces.tsx index fbca6a9..ea45d14 100644 --- a/src/components/workspace/recent-workspaces.tsx +++ b/src/components/workspace/recent-workspaces.tsx @@ -1,9 +1,12 @@ "use client"; -import { useState } from "react"; +import { useEffect, useState } from "react"; import { useRouter } from "next/navigation"; import { Clock, Workflow } from "lucide-react"; -import { getRecentWorkspaces } from "@/lib/workspace/local-history"; +import { + getRecentWorkspaces, + type RecentWorkspaceEntry, +} from "@/lib/workspace/local-history"; import { TEXT_MUTED, TEXT_SECONDARY, BORDER_DEFAULT } from "@/lib/theme"; function timeAgo(dateStr: string): string { @@ -17,9 +20,21 @@ function timeAgo(dateStr: string): string { return `${days}d ago`; } -export function RecentWorkspaces() { +interface RecentWorkspacesProps { + refreshKey?: number; +} + +export function RecentWorkspaces({ refreshKey = 0 }: RecentWorkspacesProps) { const router = useRouter(); - const [entries] = useState(() => getRecentWorkspaces()); + const [entries, setEntries] = useState([]); + + useEffect(() => { + const loadEntries = window.setTimeout(() => { + setEntries(getRecentWorkspaces()); + }, 0); + + return () => window.clearTimeout(loadEntries); + }, [refreshKey]); if (entries.length === 0) return null; diff --git a/src/components/workspace/workspace-header.tsx b/src/components/workspace/workspace-header.tsx index d279f2b..1d4457b 100644 --- a/src/components/workspace/workspace-header.tsx +++ b/src/components/workspace/workspace-header.tsx @@ -2,9 
+2,11 @@ import { useState, useRef, useEffect, type KeyboardEvent } from "react"; import { useRouter } from "next/navigation"; -import { ArrowLeft, Share2, Check, PencilLine } from "lucide-react"; +import { ArrowLeft, Share2, Check, PencilLine, Trash2, Loader2 } from "lucide-react"; import { Button } from "@/components/ui/button"; +import { ConfirmDialog } from "@/components/ui/confirm-dialog"; import { BG_SURFACE, BORDER_DEFAULT, TEXT_PRIMARY, TEXT_MUTED } from "@/lib/theme"; +import { removeRecentWorkspace } from "@/lib/workspace/local-history"; import { toast } from "sonner"; interface WorkspaceHeaderProps { @@ -18,6 +20,8 @@ export function WorkspaceHeader({ workspaceId, name, onNameChange }: WorkspaceHe const [isEditing, setIsEditing] = useState(false); const [editValue, setEditValue] = useState(name); const [copied, setCopied] = useState(false); + const [deleteOpen, setDeleteOpen] = useState(false); + const [isDeleting, setIsDeleting] = useState(false); const inputRef = useRef(null); useEffect(() => { @@ -66,53 +70,100 @@ export function WorkspaceHeader({ workspaceId, name, onNameChange }: WorkspaceHe setTimeout(() => setCopied(false), 2000); }; + const handleDelete = async () => { + if (isDeleting) return; + + setIsDeleting(true); + try { + const res = await fetch(`/api/workspaces/${workspaceId}`, { + method: "DELETE", + }); + if (!res.ok) throw new Error("Failed to delete workspace"); + removeRecentWorkspace(workspaceId); + toast.success("Workspace deleted"); + router.push("/"); + } catch { + toast.error("Failed to delete workspace"); + setIsDeleting(false); + } + }; + return ( -
-
- + <> +
+
+ + +
+ {isEditing ? ( + setEditValue(e.target.value)} + onBlur={handleSave} + onKeyDown={handleKeyDown} + className={`w-full bg-transparent text-lg font-semibold ${TEXT_PRIMARY} outline-none`} + maxLength={100} + /> + ) : ( + + )} +

Workspace

+
+ + -
- {isEditing ? ( - setEditValue(e.target.value)} - onBlur={handleSave} - onKeyDown={handleKeyDown} - className={`w-full bg-transparent text-lg font-semibold ${TEXT_PRIMARY} outline-none`} - maxLength={100} - /> - ) : ( - - )} -

Workspace

+
+
- -
-
+ + This will permanently delete {name} and all of its workflows. + + } + confirmLabel={isDeleting ? "Deleting..." : "Delete workspace"} + onConfirm={() => { + void handleDelete(); + }} + /> + ); } diff --git a/src/hooks/use-workspace-changes.ts b/src/hooks/use-workspace-changes.ts new file mode 100644 index 0000000..3187da5 --- /dev/null +++ b/src/hooks/use-workspace-changes.ts @@ -0,0 +1,73 @@ +"use client"; + +import { useCallback, useEffect, useState } from "react"; +import type { WorkflowChanges } from "@/lib/workspace/types"; + +const LAST_SEEN_PREFIX = "nexus:workspace-last-seen:"; + +function getLastSeen(workspaceId: string): string { + if (typeof window === "undefined") return new Date().toISOString(); + const stored = localStorage.getItem(LAST_SEEN_PREFIX + workspaceId); + if (stored) return stored; + // Default to 24 hours ago + return new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString(); +} + +interface UseWorkspaceChangesResult { + changes: WorkflowChanges[]; + isLoading: boolean; + since: string; + markSeen: () => void; +} + +export function useWorkspaceChanges( + workspaceId: string, + isReady: boolean, +): UseWorkspaceChangesResult { + const [changes, setChanges] = useState([]); + const [isLoading, setIsLoading] = useState(true); + const [since, setSince] = useState(""); + + useEffect(() => { + if (!isReady) return; + + const sinceValue = getLastSeen(workspaceId); + setSince(sinceValue); + + let cancelled = false; + + async function fetchChanges() { + setIsLoading(true); + try { + const res = await fetch( + `/api/workspaces/${workspaceId}/changes?since=${encodeURIComponent(sinceValue)}`, + ); + if (!res.ok) { + setChanges([]); + return; + } + const data = await res.json(); + if (!cancelled) { + setChanges(data.changes ?? 
[]); + } + } catch { + if (!cancelled) setChanges([]); + } finally { + if (!cancelled) setIsLoading(false); + } + } + + fetchChanges(); + + return () => { + cancelled = true; + }; + }, [workspaceId, isReady]); + + const markSeen = useCallback(() => { + if (typeof window === "undefined") return; + localStorage.setItem(LAST_SEEN_PREFIX + workspaceId, new Date().toISOString()); + }, [workspaceId]); + + return { changes, isLoading, since, markSeen }; +} diff --git a/src/lib/__tests__/collaboration-object-store.test.ts b/src/lib/__tests__/collaboration-object-store.test.ts index e42d201..134d0f2 100644 --- a/src/lib/__tests__/collaboration-object-store.test.ts +++ b/src/lib/__tests__/collaboration-object-store.test.ts @@ -3,6 +3,7 @@ import os from "node:os"; import path from "node:path"; import { afterEach, beforeEach, describe, expect, it } from "bun:test"; import { CollabObjectStore } from "../collaboration/object-store"; +import { LocalFilesystemProvider } from "../storage/local-provider"; let tempDir = ""; @@ -18,7 +19,8 @@ describe("CollabObjectStore", () => { }); it("stores and reloads room state with metadata", async () => { - const store = new CollabObjectStore(tempDir); + const provider = new LocalFilesystemProvider(tempDir); + const store = new CollabObjectStore(provider); const state = new Uint8Array([1, 2, 3, 4]); await store.store("room-1", state); @@ -41,7 +43,8 @@ describe("CollabObjectStore", () => { }); it("returns null for rooms that have never been stored", async () => { - const store = new CollabObjectStore(tempDir); + const provider = new LocalFilesystemProvider(tempDir); + const store = new CollabObjectStore(provider); expect(await store.load("missing-room")).toBeNull(); }); }); diff --git a/src/lib/brain/server.ts b/src/lib/brain/server.ts index 52938d6..0b516f7 100644 --- a/src/lib/brain/server.ts +++ b/src/lib/brain/server.ts @@ -1,8 +1,7 @@ import { createHash, createHmac, timingSafeEqual } from "node:crypto"; -import fs from 
"node:fs/promises"; -import path from "node:path"; import { customAlphabet } from "nanoid"; import { getBrainConfig } from "./config"; +import { getStorageProvider } from "@/lib/storage"; import type { BrainManifest, BrainDocumentRecord, @@ -26,6 +25,10 @@ function nowIso(): string { return new Date().toISOString(); } +function storage() { + return getStorageProvider(); +} + function createEmptyManifest(): BrainManifest { return { version: 1, @@ -42,22 +45,18 @@ function toVersionSummary(doc: KnowledgeDoc): string { || doc.title; } -async function ensureDir(dir: string): Promise { - await fs.mkdir(dir, { recursive: true }); -} - -async function readJsonFile(filePath: string, fallback: T): Promise { +async function readJsonKey(key: string, fallback: T): Promise { + const raw = await storage().read(key); + if (raw === null) return fallback; try { - const raw = await fs.readFile(filePath, "utf8"); return JSON.parse(raw) as T; } catch { return fallback; } } -async function writeJsonFile(filePath: string, value: unknown): Promise { - await ensureDir(path.dirname(filePath)); - await fs.writeFile(filePath, JSON.stringify(value, null, 2), "utf8"); +async function writeJsonKey(key: string, value: unknown): Promise { + await storage().write(key, JSON.stringify(value, null, 2)); } function signValue(secret: string, payload: string): string { @@ -89,28 +88,26 @@ function decodeToken(secret: string, token: string): Record | nu } export class BrainStore { - private readonly dataDir = getBrainConfig().dataDir; private readonly tokenSecret = getBrainConfig().tokenSecret; - private manifestPath(): string { - return path.join(this.dataDir, MANIFEST_FILE); + private manifestKey(): string { + return MANIFEST_FILE; } - private liveDocPath(workspaceId: string, docId: string): string { - return path.join(this.dataDir, "live", workspaceId, `${docId}.json`); + private liveDocKey(workspaceId: string, docId: string): string { + return `live/${workspaceId}/${docId}.json`; } - private 
versionPath(workspaceId: string, docId: string, versionId: string): string { - return path.join(this.dataDir, "versions", workspaceId, docId, `${versionId}.json`); + private versionKey(workspaceId: string, docId: string, versionId: string): string { + return `versions/${workspaceId}/${docId}/${versionId}.json`; } private async readManifest(): Promise { - await ensureDir(this.dataDir); - return readJsonFile(this.manifestPath(), createEmptyManifest()); + return readJsonKey(this.manifestKey(), createEmptyManifest()); } private async writeManifest(manifest: BrainManifest): Promise { - await writeJsonFile(this.manifestPath(), manifest); + await writeJsonKey(this.manifestKey(), manifest); } private toPublicDoc(doc: BrainDocumentRecord): KnowledgeDoc { @@ -132,8 +129,8 @@ export class BrainStore { createdBy: string, ): Promise { const versionId = nanoid(); - const snapshotKey = this.versionPath(workspaceId, doc.id, versionId); - await writeJsonFile(snapshotKey, doc); + const snapshotKey = this.versionKey(workspaceId, doc.id, versionId); + await writeJsonKey(snapshotKey, doc); const version: BrainDocumentVersionRecord = { id: versionId, @@ -197,7 +194,7 @@ export class BrainStore { deletedAt: null, }; manifest.documents.push(doc); - await writeJsonFile(this.liveDocPath(workspace.id, doc.id), this.toPublicDoc(doc)); + await writeJsonKey(this.liveDocKey(workspace.id, doc.id), this.toPublicDoc(doc)); await this.createVersion( manifest, workspace.id, @@ -256,7 +253,7 @@ export class BrainStore { manifest.documents.unshift(doc); } - await writeJsonFile(this.liveDocPath(workspaceId, doc.id), this.toPublicDoc(doc)); + await writeJsonKey(this.liveDocKey(workspaceId, doc.id), this.toPublicDoc(doc)); await this.createVersion( manifest, workspaceId, @@ -298,7 +295,7 @@ export class BrainStore { doc.metrics.views += 1; doc.metrics.lastViewedAt = nowIso(); doc.updatedAt = nowIso(); - await writeJsonFile(this.liveDocPath(workspaceId, doc.id), this.toPublicDoc(doc)); + await 
writeJsonKey(this.liveDocKey(workspaceId, doc.id), this.toPublicDoc(doc)); await this.writeManifest(manifest); return this.toPublicDoc(doc); } @@ -317,7 +314,7 @@ export class BrainStore { doc.metrics.feedback.push(feedback); doc.updatedAt = nowIso(); manifest.feedback.unshift({ ...feedback, workspaceId, docId }); - await writeJsonFile(this.liveDocPath(workspaceId, doc.id), this.toPublicDoc(doc)); + await writeJsonKey(this.liveDocKey(workspaceId, doc.id), this.toPublicDoc(doc)); await this.writeManifest(manifest); return this.toPublicDoc(doc); } @@ -337,7 +334,7 @@ export class BrainStore { ); if (!version) return null; - const snapshot = await readJsonFile(version.snapshotKey, null); + const snapshot = await readJsonKey(version.snapshotKey, null); if (!snapshot) return null; const index = manifest.documents.findIndex( @@ -357,7 +354,7 @@ export class BrainStore { }; manifest.documents[index] = restored; - await writeJsonFile(this.liveDocPath(workspaceId, restored.id), this.toPublicDoc(restored)); + await writeJsonKey(this.liveDocKey(workspaceId, restored.id), this.toPublicDoc(restored)); await this.createVersion( manifest, workspaceId, diff --git a/src/lib/collaboration/object-store.ts b/src/lib/collaboration/object-store.ts index 77afb97..ff0f978 100644 --- a/src/lib/collaboration/object-store.ts +++ b/src/lib/collaboration/object-store.ts @@ -1,6 +1,5 @@ import { createHash } from "node:crypto"; -import fs from "node:fs/promises"; -import path from "node:path"; +import type { StorageProvider } from "@/lib/storage"; interface CollabObjectMetadata { roomId: string; @@ -13,55 +12,38 @@ function nowIso(): string { return new Date().toISOString(); } -async function ensureDir(dir: string): Promise { - await fs.mkdir(dir, { recursive: true }); -} - -async function atomicWrite(filePath: string, content: string | Uint8Array): Promise { - const tempPath = `${filePath}.tmp`; - await ensureDir(path.dirname(filePath)); - await fs.writeFile(tempPath, content); - await 
fs.rename(tempPath, filePath); -} - export class CollabObjectStore { - constructor(private readonly dataDir: string) {} + constructor(private readonly provider: StorageProvider) {} private roomKey(roomId: string): string { return createHash("sha256").update(roomId).digest("hex"); } - private roomDir(roomId: string): string { - return path.join(this.dataDir, "rooms", this.roomKey(roomId)); - } - - private statePath(roomId: string): string { - return path.join(this.roomDir(roomId), "state.bin"); + private stateKey(roomId: string): string { + return `rooms/${this.roomKey(roomId)}/state.bin`; } - private metadataPath(roomId: string): string { - return path.join(this.roomDir(roomId), "metadata.json"); + private metadataKey(roomId: string): string { + return `rooms/${this.roomKey(roomId)}/metadata.json`; } async load(roomId: string): Promise { - try { - const state = await fs.readFile(this.statePath(roomId)); - return new Uint8Array(state); - } catch { - return null; - } + return this.provider.readBytes(this.stateKey(roomId)); } async store(roomId: string, state: Uint8Array): Promise { - const stateKey = this.statePath(roomId); + const key = this.stateKey(roomId); const metadata: CollabObjectMetadata = { roomId, - stateKey, + stateKey: key, updatedAt: nowIso(), byteLength: state.byteLength, }; - await atomicWrite(stateKey, state); - await atomicWrite(this.metadataPath(roomId), JSON.stringify(metadata, null, 2)); + await this.provider.writeAtomic(key, state); + await this.provider.writeAtomic( + this.metadataKey(roomId), + JSON.stringify(metadata, null, 2), + ); } } diff --git a/src/lib/storage/__tests__/factory.test.ts b/src/lib/storage/__tests__/factory.test.ts new file mode 100644 index 0000000..d94e225 --- /dev/null +++ b/src/lib/storage/__tests__/factory.test.ts @@ -0,0 +1,70 @@ +import { afterEach, describe, expect, it } from "bun:test"; +import { + getStorageProvider, + registerStorageProvider, + resetStorageProvider, +} from "../factory"; +import { 
LocalFilesystemProvider } from "../local-provider"; +import { resetStorageConfigCache } from "../config"; +import type { StorageProvider } from "../types"; + +describe("Storage Factory", () => { + afterEach(() => { + resetStorageProvider(); + resetStorageConfigCache(); + }); + + it("returns a LocalFilesystemProvider by default", () => { + const provider = getStorageProvider(); + expect(provider).toBeInstanceOf(LocalFilesystemProvider); + }); + + it("caches the provider instance", () => { + const a = getStorageProvider(); + const b = getStorageProvider(); + expect(a).toBe(b); + }); + + it("resets the cache with resetStorageProvider", () => { + const a = getStorageProvider(); + resetStorageProvider(); + const b = getStorageProvider(); + expect(a).not.toBe(b); + }); + + it("supports custom provider registration", () => { + const mockProvider: StorageProvider = { + read: async () => null, + readBytes: async () => null, + write: async () => {}, + writeBytes: async () => {}, + writeAtomic: async () => {}, + delete: async () => false, + deleteTree: async () => false, + exists: async () => false, + stat: async () => null, + list: async () => [], + listDirectories: async () => [], + }; + + registerStorageProvider("mock", () => mockProvider); + process.env.NEXUS_STORAGE_PROVIDER = "mock"; + resetStorageConfigCache(); + resetStorageProvider(); + + const provider = getStorageProvider(); + expect(provider).toBe(mockProvider); + + delete process.env.NEXUS_STORAGE_PROVIDER; + }); + + it("throws for unknown provider type", () => { + process.env.NEXUS_STORAGE_PROVIDER = "nonexistent"; + resetStorageConfigCache(); + resetStorageProvider(); + + expect(() => getStorageProvider()).toThrow("Unknown storage provider type"); + + delete process.env.NEXUS_STORAGE_PROVIDER; + }); +}); diff --git a/src/lib/storage/__tests__/local-provider.test.ts b/src/lib/storage/__tests__/local-provider.test.ts new file mode 100644 index 0000000..d975030 --- /dev/null +++ 
b/src/lib/storage/__tests__/local-provider.test.ts @@ -0,0 +1,170 @@ +import fs from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; +import { afterEach, beforeEach, describe, expect, it } from "bun:test"; +import { LocalFilesystemProvider } from "../local-provider"; + +let tempDir = ""; +let provider: LocalFilesystemProvider; + +describe("LocalFilesystemProvider", () => { + beforeEach(async () => { + tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "nexus-storage-test-")); + provider = new LocalFilesystemProvider(tempDir); + }); + + afterEach(async () => { + if (tempDir) { + await fs.rm(tempDir, { recursive: true, force: true }); + } + }); + + describe("read/write", () => { + it("round-trips string content", async () => { + await provider.write("test/file.txt", "hello world"); + const result = await provider.read("test/file.txt"); + expect(result).toBe("hello world"); + }); + + it("returns null for non-existent key", async () => { + const result = await provider.read("missing/file.txt"); + expect(result).toBeNull(); + }); + + it("handles empty string content", async () => { + await provider.write("empty.txt", ""); + const result = await provider.read("empty.txt"); + expect(result).toBe(""); + }); + }); + + describe("readBytes/writeBytes", () => { + it("round-trips binary content", async () => { + const data = new Uint8Array([0, 1, 2, 255, 128, 64]); + await provider.writeBytes("bin/data.bin", data); + const result = await provider.readBytes("bin/data.bin"); + expect(result).toEqual(data); + }); + + it("returns null for non-existent key", async () => { + const result = await provider.readBytes("missing.bin"); + expect(result).toBeNull(); + }); + }); + + describe("writeAtomic", () => { + it("produces correct file with string content", async () => { + await provider.writeAtomic("atomic.txt", "atomic content"); + const result = await provider.read("atomic.txt"); + expect(result).toBe("atomic content"); + }); + + it("produces correct file 
with binary content", async () => { + const data = new Uint8Array([10, 20, 30]); + await provider.writeAtomic("atomic.bin", data); + const result = await provider.readBytes("atomic.bin"); + expect(result).toEqual(data); + }); + + it("does not leave temp file behind", async () => { + await provider.writeAtomic("clean.txt", "data"); + const files = await provider.list("."); + expect(files).not.toContain("clean.txt.tmp"); + }); + }); + + describe("delete", () => { + it("returns true for existing file", async () => { + await provider.write("del.txt", "data"); + const result = await provider.delete("del.txt"); + expect(result).toBe(true); + expect(await provider.exists("del.txt")).toBe(false); + }); + + it("returns false for missing file", async () => { + const result = await provider.delete("missing.txt"); + expect(result).toBe(false); + }); + }); + + describe("deleteTree", () => { + it("removes directory recursively", async () => { + await provider.write("tree/a.txt", "a"); + await provider.write("tree/sub/b.txt", "b"); + const result = await provider.deleteTree("tree"); + expect(result).toBe(true); + expect(await provider.exists("tree")).toBe(false); + }); + + it("returns false for non-existent path", async () => { + const result = await provider.deleteTree("nope"); + expect(result).toBe(false); + }); + }); + + describe("exists", () => { + it("returns true for existing file", async () => { + await provider.write("exists.txt", "data"); + expect(await provider.exists("exists.txt")).toBe(true); + }); + + it("returns false for missing file", async () => { + expect(await provider.exists("nope.txt")).toBe(false); + }); + }); + + describe("stat", () => { + it("returns correct metadata", async () => { + await provider.write("stat.txt", "hello"); + const meta = await provider.stat("stat.txt"); + expect(meta).not.toBeNull(); + expect(meta!.size).toBe(5); + expect(typeof meta!.lastModified).toBe("string"); + }); + + it("returns null for missing file", async () => { + const meta 
= await provider.stat("missing.txt"); + expect(meta).toBeNull(); + }); + }); + + describe("list", () => { + it("returns file names under prefix", async () => { + await provider.write("dir/a.json", "{}"); + await provider.write("dir/b.json", "{}"); + await fs.mkdir(path.join(tempDir, "dir", "subdir"), { recursive: true }); + const files = await provider.list("dir"); + expect(files.sort()).toEqual(["a.json", "b.json"]); + }); + + it("returns empty array for non-existent prefix", async () => { + const files = await provider.list("nope"); + expect(files).toEqual([]); + }); + }); + + describe("listDirectories", () => { + it("returns only directories", async () => { + await provider.write("parent/file.txt", "data"); + await fs.mkdir(path.join(tempDir, "parent", "child1"), { recursive: true }); + await fs.mkdir(path.join(tempDir, "parent", "child2"), { recursive: true }); + const dirs = await provider.listDirectories("parent"); + expect(dirs.sort()).toEqual(["child1", "child2"]); + }); + + it("returns empty array for non-existent prefix", async () => { + const dirs = await provider.listDirectories("nope"); + expect(dirs).toEqual([]); + }); + }); + + describe("path traversal prevention", () => { + it("rejects keys with ..", async () => { + await expect(provider.read("../../etc/passwd")).rejects.toThrow("Invalid storage key"); + }); + + it("rejects absolute paths", async () => { + await expect(provider.read("/etc/passwd")).rejects.toThrow("Invalid storage key"); + }); + }); +}); diff --git a/src/lib/storage/config.ts b/src/lib/storage/config.ts new file mode 100644 index 0000000..11bf48b --- /dev/null +++ b/src/lib/storage/config.ts @@ -0,0 +1,27 @@ +import path from "node:path"; +import type { StorageProviderType } from "./types"; + +export interface StorageConfig { + providerType: StorageProviderType; + rootDir: string; +} + +let cachedConfig: StorageConfig | null = null; + +export function getStorageConfig(): StorageConfig { + if (cachedConfig) return cachedConfig; + + 
const brainDataDir = + process.env.NEXUS_BRAIN_DATA_DIR ?? path.join(process.cwd(), ".nexus-brain"); + + cachedConfig = { + providerType: process.env.NEXUS_STORAGE_PROVIDER ?? "local", + rootDir: process.env.NEXUS_STORAGE_ROOT ?? brainDataDir, + }; + + return cachedConfig; +} + +export function resetStorageConfigCache(): void { + cachedConfig = null; +} diff --git a/src/lib/storage/factory.ts b/src/lib/storage/factory.ts new file mode 100644 index 0000000..244fa85 --- /dev/null +++ b/src/lib/storage/factory.ts @@ -0,0 +1,42 @@ +import { getStorageConfig } from "./config"; +import { LocalFilesystemProvider } from "./local-provider"; +import type { StorageProvider } from "./types"; + +type ProviderFactory = () => StorageProvider; + +const providerFactories = new Map(); + +providerFactories.set("local", () => { + const config = getStorageConfig(); + return new LocalFilesystemProvider(config.rootDir); +}); + +let cachedProvider: StorageProvider | null = null; + +export function getStorageProvider(): StorageProvider { + if (cachedProvider) return cachedProvider; + + const config = getStorageConfig(); + const factory = providerFactories.get(config.providerType); + + if (!factory) { + const available = Array.from(providerFactories.keys()).join(", "); + throw new Error( + `Unknown storage provider type: "${config.providerType}". 
Available providers: ${available}`, + ); + } + + cachedProvider = factory(); + return cachedProvider; +} + +export function resetStorageProvider(): void { + cachedProvider = null; +} + +export function registerStorageProvider( + type: string, + factory: ProviderFactory, +): void { + providerFactories.set(type, factory); +} diff --git a/src/lib/storage/index.ts b/src/lib/storage/index.ts new file mode 100644 index 0000000..532974f --- /dev/null +++ b/src/lib/storage/index.ts @@ -0,0 +1,11 @@ +export type { + StorageProvider, + StorageMetadata, + StorageProviderType, +} from "./types"; +export { + getStorageProvider, + resetStorageProvider, + registerStorageProvider, +} from "./factory"; +export { LocalFilesystemProvider } from "./local-provider"; diff --git a/src/lib/storage/local-provider.ts b/src/lib/storage/local-provider.ts new file mode 100644 index 0000000..f79d8ca --- /dev/null +++ b/src/lib/storage/local-provider.ts @@ -0,0 +1,128 @@ +import fs from "node:fs/promises"; +import path from "node:path"; +import type { StorageMetadata, StorageProvider } from "./types"; + +export class LocalFilesystemProvider implements StorageProvider { + constructor(private readonly rootDir: string) {} + + private resolve(key: string): string { + const resolved = path.resolve(this.rootDir, key); + const relative = path.relative(this.rootDir, resolved); + if (relative.startsWith("..") || path.isAbsolute(relative)) { + throw new Error(`Invalid storage key: ${key}`); + } + // Empty relative means root directory itself — valid for list operations + return resolved; + } + + private async ensureParent(filePath: string): Promise { + await fs.mkdir(path.dirname(filePath), { recursive: true }); + } + + async read(key: string): Promise { + const filePath = this.resolve(key); + try { + return await fs.readFile(filePath, "utf8"); + } catch { + return null; + } + } + + async readBytes(key: string): Promise { + const filePath = this.resolve(key); + try { + const buf = await 
fs.readFile(filePath); + return new Uint8Array(buf); + } catch { + return null; + } + } + + async write(key: string, content: string): Promise { + const filePath = this.resolve(key); + await this.ensureParent(filePath); + await fs.writeFile(filePath, content, "utf8"); + } + + async writeBytes(key: string, content: Uint8Array): Promise { + const filePath = this.resolve(key); + await this.ensureParent(filePath); + await fs.writeFile(filePath, content); + } + + async writeAtomic(key: string, content: string | Uint8Array): Promise { + const filePath = this.resolve(key); + const tmpPath = `${filePath}.tmp`; + await this.ensureParent(filePath); + if (typeof content === "string") { + await fs.writeFile(tmpPath, content, "utf8"); + } else { + await fs.writeFile(tmpPath, content); + } + await fs.rename(tmpPath, filePath); + } + + async delete(key: string): Promise { + const filePath = this.resolve(key); + try { + await fs.unlink(filePath); + return true; + } catch { + return false; + } + } + + async deleteTree(key: string): Promise { + const resolved = this.resolve(key); + try { + await fs.access(resolved); + await fs.rm(resolved, { recursive: true, force: true }); + return true; + } catch { + return false; + } + } + + async exists(key: string): Promise { + const filePath = this.resolve(key); + try { + await fs.access(filePath); + return true; + } catch { + return false; + } + } + + async stat(key: string): Promise { + const filePath = this.resolve(key); + try { + const stats = await fs.stat(filePath); + return { + size: stats.size, + lastModified: stats.mtime.toISOString(), + }; + } catch { + return null; + } + } + + async list(prefix: string): Promise { + const resolved = this.resolve(prefix); + try { + const entries = await fs.readdir(resolved, { withFileTypes: true }); + return entries.filter((e) => e.isFile()).map((e) => e.name); + } catch { + return []; + } + } + + async listDirectories(prefix: string): Promise { + const resolved = this.resolve(prefix); + try { + 
const entries = await fs.readdir(resolved, { withFileTypes: true });
+      return entries.filter((e) => e.isDirectory()).map((e) => e.name);
+    } catch {
+      return [];
+    }
+  }
+}
diff --git a/src/lib/storage/types.ts b/src/lib/storage/types.ts
new file mode 100644
index 0000000..0e5dcbd
--- /dev/null
+++ b/src/lib/storage/types.ts
@@ -0,0 +1,41 @@
+export interface StorageMetadata {
+  size: number;
+  lastModified: string;
+}
+
+export interface StorageProvider {
+  /** Read a file as a UTF-8 string. Returns null if not found. */
+  read(key: string): Promise<string | null>;
+
+  /** Read a file as raw bytes. Returns null if not found. */
+  readBytes(key: string): Promise<Uint8Array | null>;
+
+  /** Write a UTF-8 string to a key. Creates parent directories/prefixes as needed. */
+  write(key: string, content: string): Promise<void>;
+
+  /** Write raw bytes to a key. Creates parent directories/prefixes as needed. */
+  writeBytes(key: string, content: Uint8Array): Promise<void>;
+
+  /** Atomically write content (write to temp, then rename). Providers that don't support atomic ops fall back to regular write. */
+  writeAtomic(key: string, content: string | Uint8Array): Promise<void>;
+
+  /** Delete a single key. Returns true if deleted, false if not found. */
+  delete(key: string): Promise<boolean>;
+
+  /** Recursively delete a key prefix/directory. Returns true if anything was deleted. */
+  deleteTree(key: string): Promise<boolean>;
+
+  /** Check if a key exists. */
+  exists(key: string): Promise<boolean>;
+
+  /** Get metadata for a key. Returns null if not found. */
+  stat(key: string): Promise<StorageMetadata | null>;
+
+  /** List immediate children under a prefix. Returns relative names. */
+  list(prefix: string): Promise<string[]>;
+
+  /** List immediate child directories under a prefix. Returns relative names.
*/ + listDirectories(prefix: string): Promise; +} + +export type StorageProviderType = "local" | string; diff --git a/src/lib/workspace/local-history.ts b/src/lib/workspace/local-history.ts index 94713ca..7eab1e3 100644 --- a/src/lib/workspace/local-history.ts +++ b/src/lib/workspace/local-history.ts @@ -34,3 +34,14 @@ export function addRecentWorkspace(entry: RecentWorkspaceEntry): void { // localStorage may be unavailable } } + +export function removeRecentWorkspace(id: string): void { + if (typeof window === "undefined") return; + try { + const existing = getRecentWorkspaces(); + const updated = existing.filter((entry) => entry.id !== id); + localStorage.setItem(STORAGE_KEY, JSON.stringify(updated)); + } catch { + // localStorage may be unavailable + } +} diff --git a/src/lib/workspace/server.ts b/src/lib/workspace/server.ts index fc8d361..c300940 100644 --- a/src/lib/workspace/server.ts +++ b/src/lib/workspace/server.ts @@ -1,7 +1,6 @@ -import fs from "node:fs/promises"; -import path from "node:path"; import { customAlphabet } from "nanoid"; -import { getWorkspaceConfig } from "./config"; +import { writeSnapshot } from "./snapshots"; +import { getStorageProvider } from "@/lib/storage"; import type { WorkspaceManifest, WorkspaceRecord, WorkflowRecord } from "./types"; import type { WorkflowJSON } from "@/types/workflow"; @@ -12,38 +11,39 @@ function nowIso(): string { return new Date().toISOString(); } -async function ensureDir(dir: string): Promise { - await fs.mkdir(dir, { recursive: true }); +function storage() { + return getStorageProvider(); } -async function readJsonFile(filePath: string, fallback: T): Promise { - try { - const raw = await fs.readFile(filePath, "utf8"); - return JSON.parse(raw) as T; - } catch { - return fallback; +function validateId(id: string): void { + if (!id || id.includes("..") || id.includes("/") || id.includes("\\")) { + throw new Error("Invalid workspace id"); } } -async function writeJsonFile(filePath: string, value: unknown): 
Promise { - await ensureDir(path.dirname(filePath)); - await fs.writeFile(filePath, JSON.stringify(value, null, 2), "utf8"); +function manifestKey(id: string): string { + validateId(id); + return `workspaces/${id}/${MANIFEST_FILE}`; } -function workspaceDir(id: string): string { - return path.join(getWorkspaceConfig().dataDir, id); +function workflowKey(workspaceId: string, workflowId: string): string { + validateId(workspaceId); + validateId(workflowId); + return `workspaces/${workspaceId}/workflows/${workflowId}.json`; } -function manifestPath(id: string): string { - return path.join(workspaceDir(id), MANIFEST_FILE); -} - -function workflowsDir(workspaceId: string): string { - return path.join(workspaceDir(workspaceId), "workflows"); +async function readJsonKey(key: string, fallback: T): Promise { + const raw = await storage().read(key); + if (raw === null) return fallback; + try { + return JSON.parse(raw) as T; + } catch { + return fallback; + } } -function workflowPath(workspaceId: string, workflowId: string): string { - return path.join(workflowsDir(workspaceId), `${workflowId}.json`); +async function writeJsonKey(key: string, value: unknown): Promise { + await storage().write(key, JSON.stringify(value, null, 2)); } function createDefaultWorkflowJSON(name: string): WorkflowJSON { @@ -59,25 +59,39 @@ function createDefaultWorkflowJSON(name: string): WorkflowJSON { }; } +export async function listWorkspaces(): Promise { + try { + const dirs = await storage().listDirectories("workspaces"); + const workspaces: WorkspaceRecord[] = []; + for (const dir of dirs) { + const key = `workspaces/${dir}/${MANIFEST_FILE}`; + const manifest = await readJsonKey(key, null); + if (manifest?.workspace) { + workspaces.push(manifest.workspace); + } + } + workspaces.sort((a, b) => new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime()); + return workspaces; + } catch { + return []; + } +} + export async function createWorkspace(name: string): Promise { const id = 
nanoid(); const now = nowIso(); const workspace: WorkspaceRecord = { id, name, createdAt: now, updatedAt: now }; const manifest: WorkspaceManifest = { version: 1, workspace, workflows: [] }; - await ensureDir(workflowsDir(id)); - await writeJsonFile(manifestPath(id), manifest); + await writeJsonKey(manifestKey(id), manifest); return workspace; } export async function getWorkspace(id: string): Promise { - try { - await fs.access(manifestPath(id)); - } catch { - return null; - } - return readJsonFile(manifestPath(id), null); + const key = manifestKey(id); + if (!(await storage().exists(key))) return null; + return readJsonKey(key, null); } export async function updateWorkspace( @@ -89,11 +103,20 @@ export async function updateWorkspace( manifest.workspace.name = updates.name; manifest.workspace.updatedAt = nowIso(); - await writeJsonFile(manifestPath(id), manifest); + await writeJsonKey(manifestKey(id), manifest); return manifest.workspace; } +export async function deleteWorkspace(id: string): Promise { + const manifest = await getWorkspace(id); + if (!manifest) return false; + + validateId(id); + await storage().deleteTree(`workspaces/${id}`); + return true; +} + export async function createWorkflow( workspaceId: string, name: string, @@ -114,8 +137,8 @@ export async function createWorkflow( manifest.workflows.push(record); manifest.workspace.updatedAt = now; - await writeJsonFile(workflowPath(workspaceId, id), createDefaultWorkflowJSON(name)); - await writeJsonFile(manifestPath(workspaceId), manifest); + await writeJsonKey(workflowKey(workspaceId, id), createDefaultWorkflowJSON(name)); + await writeJsonKey(manifestKey(workspaceId), manifest); return record; } @@ -124,12 +147,9 @@ export async function getWorkflow( workspaceId: string, workflowId: string, ): Promise { - try { - await fs.access(workflowPath(workspaceId, workflowId)); - } catch { - return null; - } - return readJsonFile(workflowPath(workspaceId, workflowId), null); + const key = 
workflowKey(workspaceId, workflowId); + if (!(await storage().exists(key))) return null; + return readJsonKey(key, null); } export async function saveWorkflow( @@ -149,8 +169,9 @@ export async function saveWorkflow( record.lastModifiedBy = lastModifiedBy; manifest.workspace.updatedAt = now; - await writeJsonFile(workflowPath(workspaceId, workflowId), data); - await writeJsonFile(manifestPath(workspaceId), manifest); + await writeJsonKey(workflowKey(workspaceId, workflowId), data); + await writeJsonKey(manifestKey(workspaceId), manifest); + await writeSnapshot(workspaceId, workflowId, data, lastModifiedBy); return true; } @@ -169,7 +190,7 @@ export async function updateWorkflowMeta( record.name = updates.name; record.updatedAt = nowIso(); manifest.workspace.updatedAt = record.updatedAt; - await writeJsonFile(manifestPath(workspaceId), manifest); + await writeJsonKey(manifestKey(workspaceId), manifest); return record; } @@ -187,12 +208,8 @@ export async function deleteWorkflow( manifest.workflows.splice(index, 1); manifest.workspace.updatedAt = nowIso(); - try { - await fs.unlink(workflowPath(workspaceId, workflowId)); - } catch { - // file may already be gone - } + await storage().delete(workflowKey(workspaceId, workflowId)); - await writeJsonFile(manifestPath(workspaceId), manifest); + await writeJsonKey(manifestKey(workspaceId), manifest); return true; } diff --git a/src/lib/workspace/snapshots.ts b/src/lib/workspace/snapshots.ts new file mode 100644 index 0000000..f2fea90 --- /dev/null +++ b/src/lib/workspace/snapshots.ts @@ -0,0 +1,200 @@ +import { getStorageProvider } from "@/lib/storage"; +import { getWorkspace } from "./server"; +import type { SnapshotMeta, SnapshotFile, ChangeEvent, WorkflowChanges, ChangesResponse } from "./types"; +import type { WorkflowJSON } from "@/types/workflow"; + +function storage() { + return getStorageProvider(); +} + +function snapshotsPrefix(workspaceId: string, workflowId: string): string { + return 
`workspaces/${workspaceId}/snapshots/${workflowId}`;
+}
+
+function toUrlSafeTimestamp(iso: string): string {
+  return iso.replace(/:/g, "-");
+}
+
+function fromUrlSafeTimestamp(safe: string): string {
+  const tIndex = safe.indexOf("T");
+  if (tIndex < 0) return safe;
+  const datePart = safe.slice(0, tIndex);
+  const timePart = safe.slice(tIndex).replace(/-/g, ":");
+  return datePart + timePart;
+}
+
+export async function writeSnapshot(
+  workspaceId: string,
+  workflowId: string,
+  data: WorkflowJSON,
+  savedBy: string,
+): Promise<void> {
+  const timestamp = new Date().toISOString();
+  const snapshot: SnapshotFile = { timestamp, workflowId, workspaceId, savedBy, data };
+  const filename = `${toUrlSafeTimestamp(timestamp)}.json`;
+  const key = `${snapshotsPrefix(workspaceId, workflowId)}/${filename}`;
+
+  await storage().writeAtomic(key, JSON.stringify(snapshot, null, 2));
+}
+
+export async function listSnapshots(
+  workspaceId: string,
+  workflowId: string,
+): Promise<SnapshotMeta[]> {
+  const prefix = snapshotsPrefix(workspaceId, workflowId);
+  const entries = await storage().list(prefix);
+
+  const metas: SnapshotMeta[] = [];
+  for (const entry of entries) {
+    if (!entry.endsWith(".json") || entry.endsWith(".tmp")) continue;
+    const safeName = entry.replace(".json", "");
+    const timestamp = fromUrlSafeTimestamp(safeName);
+    try {
+      const raw = await storage().read(`${prefix}/${entry}`);
+      if (!raw) continue;
+      const snap = JSON.parse(raw) as SnapshotFile;
+      metas.push({ timestamp, savedBy: snap.savedBy });
+    } catch {
+      // skip corrupt files
+    }
+  }
+
+  metas.sort((a, b) => a.timestamp.localeCompare(b.timestamp));
+  return metas;
+}
+
+export async function getSnapshot(
+  workspaceId: string,
+  workflowId: string,
+  timestamp: string,
+): Promise<SnapshotFile | null> {
+  const filename = `${toUrlSafeTimestamp(timestamp)}.json`;
+  const key = `${snapshotsPrefix(workspaceId, workflowId)}/${filename}`;
+  try {
+    const raw = await storage().read(key);
+    if (!raw) return null;
+    return JSON.parse(raw) as
SnapshotFile;
+  } catch {
+    return null;
+  }
+}
+
+interface NodeInfo {
+  id: string;
+  label: string;
+}
+
+function extractNodes(data: WorkflowJSON): Map<string, NodeInfo> {
+  const map = new Map<string, NodeInfo>();
+  for (const node of data.nodes) {
+    map.set(node.id, {
+      id: node.id,
+      label: (node.data as Record<string, unknown>)?.label as string ?? node.id,
+    });
+  }
+  return map;
+}
+
+function diffNodeSets(
+  older: Map<string, NodeInfo>,
+  newer: Map<string, NodeInfo>,
+  savedBy: string,
+  timestamp: string,
+): ChangeEvent[] {
+  const events: ChangeEvent[] = [];
+
+  for (const [id, info] of newer) {
+    if (!older.has(id)) {
+      events.push({ type: "node_added", nodeName: info.label, by: savedBy, at: timestamp });
+    }
+  }
+
+  for (const [id, info] of older) {
+    if (!newer.has(id)) {
+      events.push({ type: "node_deleted", nodeName: info.label, by: savedBy, at: timestamp });
+    }
+  }
+
+  for (const [id, newInfo] of newer) {
+    const oldInfo = older.get(id);
+    if (oldInfo && oldInfo.label !== newInfo.label) {
+      events.push({
+        type: "node_renamed",
+        nodeName: newInfo.label,
+        from: oldInfo.label,
+        to: newInfo.label,
+        by: savedBy,
+        at: timestamp,
+      });
+    }
+  }
+
+  return events;
+}
+
+export async function computeChanges(
+  workspaceId: string,
+  since: string,
+): Promise<ChangesResponse> {
+  const manifest = await getWorkspace(workspaceId);
+  if (!manifest) return { changes: [] };
+
+  const results: WorkflowChanges[] = [];
+
+  for (const wfRecord of manifest.workflows) {
+    const allMetas = await listSnapshots(workspaceId, wfRecord.id);
+    if (allMetas.length === 0) continue;
+
+    const afterSince = allMetas.filter((m) => m.timestamp > since);
+    if (afterSince.length === 0) continue;
+
+    const beforeSince = allMetas.filter((m) => m.timestamp <= since);
+    const baselineMeta = beforeSince.length > 0 ?
beforeSince[beforeSince.length - 1] : null; + + const snapshotsToWalk: SnapshotFile[] = []; + + if (baselineMeta) { + const baseSnap = await getSnapshot(workspaceId, wfRecord.id, baselineMeta.timestamp); + if (baseSnap) snapshotsToWalk.push(baseSnap); + } + + for (const meta of afterSince) { + const snap = await getSnapshot(workspaceId, wfRecord.id, meta.timestamp); + if (snap) snapshotsToWalk.push(snap); + } + + if (snapshotsToWalk.length === 0) continue; + + const events: ChangeEvent[] = []; + + if (!baselineMeta && snapshotsToWalk.length > 0) { + const first = snapshotsToWalk[0]; + const emptyMap = new Map(); + const firstNodes = extractNodes(first.data); + events.push(...diffNodeSets(emptyMap, firstNodes, first.savedBy, first.timestamp)); + + for (let i = 1; i < snapshotsToWalk.length; i++) { + const older = extractNodes(snapshotsToWalk[i - 1].data); + const newer = extractNodes(snapshotsToWalk[i].data); + events.push(...diffNodeSets(older, newer, snapshotsToWalk[i].savedBy, snapshotsToWalk[i].timestamp)); + } + } else { + for (let i = 1; i < snapshotsToWalk.length; i++) { + const older = extractNodes(snapshotsToWalk[i - 1].data); + const newer = extractNodes(snapshotsToWalk[i].data); + events.push(...diffNodeSets(older, newer, snapshotsToWalk[i].savedBy, snapshotsToWalk[i].timestamp)); + } + } + + if (events.length > 0) { + results.push({ + workflowId: wfRecord.id, + workflowName: wfRecord.name, + changeCount: events.length, + events, + }); + } + } + + return { changes: results }; +} diff --git a/src/lib/workspace/types.ts b/src/lib/workspace/types.ts index b718ea7..6e0ba5a 100644 --- a/src/lib/workspace/types.ts +++ b/src/lib/workspace/types.ts @@ -19,3 +19,40 @@ export interface WorkspaceManifest { workspace: WorkspaceRecord; workflows: WorkflowRecord[]; } + +// Snapshot types +export interface SnapshotMeta { + timestamp: string; + savedBy: string; +} + +export interface SnapshotFile { + timestamp: string; + workflowId: string; + workspaceId: string; + 
savedBy: string; + data: import("@/types/workflow").WorkflowJSON; +} + +// Change event types +export type ChangeEventType = "node_added" | "node_deleted" | "node_renamed"; + +export interface ChangeEvent { + type: ChangeEventType; + nodeName: string; + from?: string; + to?: string; + by: string; + at: string; +} + +export interface WorkflowChanges { + workflowId: string; + workflowName: string; + changeCount: number; + events: ChangeEvent[]; +} + +export interface ChangesResponse { + changes: WorkflowChanges[]; +}