diff --git a/AGENTS.md b/AGENTS.md index 9c2c504..0d0725a 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,50 +1,176 @@ # Coding Agent Guide for Construct ## Build/Lint/Test Commands -### Core Commands```bash -bun run index.ts --list bun run build # Build current platform -bun run build:all # Build all platforms (linux-x64, arm-64, macos-..., windows)bun run build:macos-arm64 # Build specific platform -```### Running Single Test (Manual)```bash -# Manual verification steps:1 bun run index.ts --list2 Verify output contains "Available plugins:"3 Test with plugin: bun run index --load playwright@claude-plugins-original -```### Typechecking```bash -# Bun infers tsconfig.json automaticallybun run index.ts --help # Triggers typechecking + +### Core Commands +```bash +bun run build # Build current platform +bun run build:all # Build all platforms (linux-x64, linux-arm64, macos-x64, macos-arm64, windows-x64) +bun run build:macos-arm64 # Build specific platform +bun run typecheck # Type checking with TypeScript ``` -## Code Style Guidelines +### Running Tests +```bash +# Run all tests +bun test + +# Run specific test file +bun test src/plugin.test.ts +bun test src/marketplace.test.ts +bun test src/cache.test.ts +bun test src/env-expansion.test.ts -### TypeScript Configuration (tsconfig.json)Target: ESNext, Module resolution: bundler with verbatim syntax, Strict mode enabled (strict, noUncheckedIndexedAccess), NoEmit: true### Imports Order & Style1 Node built-ins use explicit `node:` prefix: `import { join } from "node:path";` -2 External libraries import directly: `import yargs from "yargs";`3 Type imports use explicit `type` keyword### Interface Naming (PascalCase)CliArgs, PluginInfo, ConstructConfig, TranslationResult +# Run tests with coverage +bun test --coverage +``` + +### Manual Verification +```bash +# List available plugins +bun run index.ts --list -### Variable/Constant Naming (camelCase/SCREAMING_SCAL_Constants: CONFIG_FILE = ".construct.json", Variables: cliPlugins, 
enabledPluginNames +# Verify output contains "Available plugins:" +bun run index.ts --list | grep "Available plugins" -### Function Naming (camelCase)parseCliArgs, scanAllPlugins, loadConfig, saveConfig, mergeCliWithConfig### File Organization (src/)cli.ts: CLI argument parsing (yargs)scanner.ts: Plugin discovery/indexing (scanAllPlugins, scanInstalledPlugins)config.ts: Configuration management (.construct.json)translator.ts: Format translation (translatePlugins, expandPluginRootInObject)executor.ts: Copilot subprocess spawningcompletions.ts: Shell completion script generation +# Test with specific plugin +bun run index.ts --load playwright@claude-plugins-original -### Error Handling Patterns1 Console.warn for non-critical errors (missing files, scanning issues)2 Console.error for critical failures (file I/O, parsing errors) -3 Try/catch wrap file system operations and JSON parsing4 Functions return null on graceful failure (loadConfig, readMcpConfig) +# Test plugin scanning +bun run index.ts --list -### Async/await Style- Top-level async function: `async function main(): Promise`- Try/catch in async context, Concurrent via Promise.all +# Verify configuration persistence +cat .construct.json +``` -### JSDoc Comments (TSDoc on exported symbols)```typescript +## Code Style Guidelines + +### TypeScript Configuration (tsconfig.json) +- Target: ESNext +- Module resolution: bundler with verbatim syntax +- Strict mode enabled (strict, noUncheckedIndexedAccess, noFallthroughCasesInSwitch, noImplicitOverride) +- NoEmit: true +- Module: Preserve +- Module detection: force + +### Imports Order & Style +1. Node built-ins use explicit `node:` prefix: `import { join } from "node:path";` +2. External libraries import directly: `import yargs from "yargs";` +3. 
Type imports use explicit `type` keyword: `import type { PluginInfo } from "./scanner";` + +### Naming Conventions +- **Interfaces**: PascalCase (CliArgs, PluginInfo, ConstructConfig, TranslationResult, PluginComponent) +- **Variables**: camelCase (cliPlugins, enabledPluginNames, pluginCachePaths) +- **Constants**: SCREAMING_SNAKE_CASE (CONFIG_FILE = ".construct.json") +- **Functions**: camelCase (parseCliArgs, scanAllPlugins, loadConfig, saveConfig, mergeCliWithConfig) + +### File Organization (src/) +- `cli.ts`: CLI argument parsing (yargs) +- `scanner.ts`: Plugin discovery and indexing (scanAllPlugins, scanInstalledPlugins, scanMarketplacePlugins) +- `config.ts`: Configuration management (.construct.json) +- `translator.ts`: Format translation (translatePlugins, expandPluginRootInObject) +- `executor.ts`: Copilot subprocess spawning +- `completions.ts`: Shell completion script generation +- `plugin.ts`: Plugin management +- `marketplace.ts`: Marketplace operations +- `cache.ts`: Plugin caching +- `agent-translator.ts`: Agent format translation +- `skill-translator.ts`: Skill format translation +- `operator.ts`: Interactive plugin selector +- `env-expansion.ts`: Environment variable expansion + +### Error Handling Patterns +1. `console.warn` for non-critical errors (missing files, scanning issues, failed agent translation) +2. `console.error` for critical failures (file I/O, parsing errors, MCP config reading) +3. Try/catch wrap file system operations and JSON parsing +4. Functions return `null` on graceful failure (loadConfig, readMcpConfig) +5. 
Catch blocks should log errors but not crash the application + +### Async/await Style +- Top-level async function: `async function main(): Promise` +- Try/catch in async context +- Concurrent operations via `Promise.all()` +- Use `await` for file operations and async function calls + +### JSDoc Comments (TSDoc on exported symbols) +```typescript /** - * Represents a single component within a plugin (skill, MCP server, or agent) */export interface PluginComponent {} + * Represents a single component within a plugin (skill, MCP server, or agent) + */ +export interface PluginComponent {} -/** Scans all installed plugins and builds a registry */ -}```### Custom Types (avoid "any")Explicit interfaces: PluginInfo, PluginComponent, Generic type parameters: , Return types on all functions +/** + * Scans all installed plugins and builds a registry + */ +export async function scanAllPlugins(): Promise +``` -### Structure (all source files)1 Imports section2 Type/interface definitions3 Constant/module-level declarations4 Function implementations (exported and private) +### Custom Types +- Avoid "any" types - use explicit interfaces +- Use generic type parameters: ``, `` +- Define return types on all functions +- Use union types for multiple possibilities: `'skill' | 'mcp' | 'agent'` -### Naming Convention: PluginsFormat: `@`, Example: `tmux@scaryrawr-plugins` +### Structure (all source files) +1. Imports section +2. Type/interface definitions +3. Constant/module-level declarations +4. 
Function implementations (exported and private) -### File Paths (Absolute, no relative)Use `join` for path construction: `const configPath = join(process.cwd(), CONFIG_FILE)` +### Naming Convention: Plugins +Format: `@` +Example: `tmux@scaryrawr-plugins` -### Project Structure```src/├── cli.ts CLI argument parsing├── scanner.ts Plugin discovery and indexing├── config.ts Configuration management├── translator.ts Format translation logic└── executor.ts Copilot subprocess execution -``` +### File Paths +- Use absolute paths with `join()` from `node:path` +- Example: `const configPath = join(proc.cwd(), CONFIG_FILE)` where `proc` is an injected process-like dependency -### Environment Variables- `COPILOT_SKILLS_DIRS`: Comma-separated list of skill directories +### Environment Variables +- `COPILOT_SKILLS_DIRS`: Comma-separated list of skill directories - `CLAUDE_PLUGIN_ROOT`: Placeholder for plugin root path (expanded during translation) -### Testing Manual```bash -# Test plugin scanning: bun run index --list# Verify configuration persistence: cat .construct.json +### Common Issues +1. **No Plugins Found**: `installed_plugins.json` missing or empty - Install plugins via Claude Code first +2. **Plugin Not Found**: Name mismatch (case-sensitive) - Use exact format from `installed_plugins.json` +3. **MCP Servers Not Working**: Invalid `.mcp.json` - Validate JSON and required fields +4. **Skills Not Loading**: `COPILOT_SKILLS_DIRS` not set - Check environment variable construction +5. 
**Type Errors**: Run `bun run typecheck` to verify TypeScript configuration + +## Testing Patterns + +### Dependency Injection +All core modules use optional dependency injection for testability: +- `config.ts` - `ConfigDependencies` with `fs`, `process` +- `scanner.ts` - `ScannerDependencies` with `fs`, `process` +- `marketplace.ts` - `MarketplaceDependencies` with `fs`, `shell`, paths +- `cache.ts` - `CacheDependencies` with `fs`, `process`; returns `CacheInstance` +- `plugin.ts` - `PluginDependencies` with scanner, config, output functions +- `translator.ts` - `TranslatorDependencies` with `cache`, `fs` +- `executor.ts` - `ExecutorDependencies` with `shell`, `env` + +### Test Utilities +Import from `./test-utils`: +```typescript +import { createMemoryFileSystem, createMockProcess, createMockShell } from './test-utils'; ``` -### Common Issues1 No Plugins Found: installed_plugins.json missing or empty - Install plugins via Claude Code first2 Plugin Not Found: Name mismatch (case-sensitive) - Use exact format from installed_plugins.json3 MCP Servers Not Working: Invalid .mcp.0json - Validate JSON and required fields4 Skills Not Loading: COPILOT_SKILLS_DIRS not set - Check environment variable construction \ No newline at end of file +### Unit vs Integration Tests +- **Unit tests** (*.test.ts): Use mocks, no I/O, fast +- **Integration tests** (*.integration.test.ts): Use real file system in temp dirs + +### Example Unit Test +```typescript +import { describe, expect, test } from 'bun:test'; +import { createMemoryFileSystem, createMockProcess } from './test-utils'; + +test('example with mocks', async () => { + const fs = createMemoryFileSystem() + .withFile('/home/.construct.json', '{"enabledPlugins":[]}') + .build(); + const proc = createMockProcess({ cwd: '/work', homedir: '/home' }); + + // Call function with injected deps + const result = await someFunction({ fs, process: proc }); + expect(result).toBeDefined(); +}); +``` \ No newline at end of file diff --git 
a/src/adapters/bun-file-system.ts b/src/adapters/bun-file-system.ts new file mode 100644 index 0000000..dc2b0b6 --- /dev/null +++ b/src/adapters/bun-file-system.ts @@ -0,0 +1,150 @@ +import type { FileSystem, FileStat, MkdirOptions, RmOptions, CpOptions } from '../interfaces/file-system'; +import * as fs from 'fs'; +import * as fsPromises from 'fs/promises'; +import * as path from 'path'; + +/** + * File stat wrapper for Node.js fs.Stats + */ +class BunFileStat implements FileStat { + constructor(private stats: fs.Stats) {} + + isDirectory(): boolean { + return this.stats.isDirectory(); + } + + isFile(): boolean { + return this.stats.isFile(); + } +} + +/** + * FileSystem implementation using Bun APIs with Node.js fallbacks + */ +class BunFileSystem implements FileSystem { + /** + * Reads file content as UTF-8 string using Bun.file() + */ + async readFile(filePath: string): Promise { + try { + const file = Bun.file(filePath); + return await file.text(); + } catch (error) { + throw new Error(`Failed to read file ${filePath}: ${error instanceof Error ? error.message : String(error)}`); + } + } + + /** + * Writes content to file using Bun.write(), creating parent directories if needed + */ + async writeFile(filePath: string, content: string): Promise { + try { + // Ensure parent directory exists + const dir = path.dirname(filePath); + await this.mkdir(dir, { recursive: true }); + + // Write file using Bun.write() + await Bun.write(filePath, content); + } catch (error) { + throw new Error(`Failed to write file ${filePath}: ${error instanceof Error ? 
error.message : String(error)}`); + } + } + + /** + * Checks if path exists (file or directory) + */ + async exists(filePath: string): Promise { + try { + // First try as file + const file = Bun.file(filePath); + if (await file.exists()) { + return true; + } + // Fall back to stat for directories + await fsPromises.stat(filePath); + return true; + } catch (error) { + return false; + } + } + + /** + * Creates directory using Node.js fs.mkdir (with promises) + */ + async mkdir(dirPath: string, options?: MkdirOptions): Promise { + try { + await fsPromises.mkdir(dirPath, { + recursive: options?.recursive ?? false, + }); + } catch (error) { + // EEXIST is not an error if recursive is true + if ((error as NodeJS.ErrnoException)?.code === 'EEXIST' && options?.recursive) { + return; + } + throw new Error(`Failed to create directory ${dirPath}: ${error instanceof Error ? error.message : String(error)}`); + } + } + + /** + * Removes file or directory using Node.js fs.rm (with promises) + */ + async rm(filePath: string, options?: RmOptions): Promise { + try { + await fsPromises.rm(filePath, { + recursive: options?.recursive ?? false, + force: options?.force ?? false, + }); + } catch (error) { + // ENOENT is not an error if force is true + if ((error as NodeJS.ErrnoException)?.code === 'ENOENT' && options?.force) { + return; + } + throw new Error(`Failed to remove ${filePath}: ${error instanceof Error ? error.message : String(error)}`); + } + } + + /** + * Lists directory contents using Node.js fs.readdir (with promises) + */ + async readdir(dirPath: string): Promise { + try { + const entries = await fsPromises.readdir(dirPath); + return entries; + } catch (error) { + throw new Error(`Failed to read directory ${dirPath}: ${error instanceof Error ? 
error.message : String(error)}`); + } + } + + /** + * Gets file/directory stats using Node.js fs.stat (with promises) + */ + async stat(filePath: string): Promise { + try { + const stats = await fsPromises.stat(filePath); + return new BunFileStat(stats); + } catch (error) { + throw new Error(`Failed to stat ${filePath}: ${error instanceof Error ? error.message : String(error)}`); + } + } + + /** + * Copies file or directory using Node.js fs/promises cp + */ + async cp(src: string, dest: string, options?: CpOptions): Promise { + try { + await fsPromises.cp(src, dest, { + recursive: options?.recursive ?? false, + force: options?.force ?? false, + }); + } catch (error) { + throw new Error(`Failed to copy ${src} to ${dest}: ${error instanceof Error ? error.message : String(error)}`); + } + } +} + +/** + * Singleton instance for convenient access + */ +export const bunFileSystem = new BunFileSystem(); + +export { BunFileSystem }; diff --git a/src/adapters/bun-shell.ts b/src/adapters/bun-shell.ts new file mode 100644 index 0000000..28b382c --- /dev/null +++ b/src/adapters/bun-shell.ts @@ -0,0 +1,35 @@ +import type { Shell, SpawnOptions, SpawnSyncResult } from "../interfaces/shell"; + +/** + * Shell adapter implementation using Bun.spawnSync + */ +export class BunShell implements Shell { + /** + * Spawns a command synchronously and waits for completion + */ + spawnSync(cmd: string[], options?: SpawnOptions): SpawnSyncResult { + // Map SpawnOptions to Bun spawn options + const bunOptions: Parameters[1] = { + cwd: options?.cwd, + env: options?.env ? { ...process.env, ...options.env } : undefined, + stdout: options?.stdout === "pipe" ? "pipe" : options?.stdout, + stderr: options?.stderr === "pipe" ? "pipe" : options?.stderr, + stdin: options?.stdin === "pipe" ? "pipe" : options?.stdin, + }; + + // Execute the command + const result = Bun.spawnSync(cmd, bunOptions); + + // Convert Bun result to SpawnSyncResult + return { + exitCode: result.exitCode, + stdout: result.stdout ?? 
new Uint8Array(), + stderr: result.stderr ?? new Uint8Array(), + }; + } +} + +/** + * Singleton instance for convenient access + */ +export const bunShell = new BunShell(); diff --git a/src/adapters/memory-file-system.test.ts b/src/adapters/memory-file-system.test.ts new file mode 100644 index 0000000..4ffee1b --- /dev/null +++ b/src/adapters/memory-file-system.test.ts @@ -0,0 +1,92 @@ +import { describe, expect, test } from "bun:test"; +import { MemoryFileSystem } from "./memory-file-system"; + +describe("MemoryFileSystem", () => { + describe("writeFileSync", () => { + test("writes file content synchronously", async () => { + const fs = new MemoryFileSystem(); + + fs.writeFileSync("/test/file.txt", "hello world"); + + expect(await fs.readFile("/test/file.txt")).toBe("hello world"); + }); + + test("creates parent directories automatically", async () => { + const fs = new MemoryFileSystem(); + + fs.writeFileSync("/deep/nested/path/file.txt", "content"); + + expect(await fs.exists("/deep")).toBe(true); + expect(await fs.exists("/deep/nested")).toBe(true); + expect(await fs.exists("/deep/nested/path")).toBe(true); + expect(await fs.readFile("/deep/nested/path/file.txt")).toBe("content"); + }); + + test("overwrites existing file", async () => { + const fs = new MemoryFileSystem(); + + fs.writeFileSync("/file.txt", "original"); + fs.writeFileSync("/file.txt", "updated"); + + expect(await fs.readFile("/file.txt")).toBe("updated"); + }); + }); + + describe("mkdirSync", () => { + test("creates directory synchronously", async () => { + const fs = new MemoryFileSystem(); + + fs.mkdirSync("/newdir"); + + expect(await fs.exists("/newdir")).toBe(true); + }); + + test("creates nested directories with recursive option", async () => { + const fs = new MemoryFileSystem(); + + fs.mkdirSync("/a/b/c/d", { recursive: true }); + + expect(await fs.exists("/a")).toBe(true); + expect(await fs.exists("/a/b")).toBe(true); + expect(await fs.exists("/a/b/c")).toBe(true); + expect(await 
fs.exists("/a/b/c/d")).toBe(true); + }); + + test("throws error without recursive when parent doesn't exist", () => { + const fs = new MemoryFileSystem(); + + expect(() => fs.mkdirSync("/parent/child")).toThrow("ENOENT"); + }); + + test("silently succeeds when directory already exists", async () => { + const fs = new MemoryFileSystem(); + + fs.mkdirSync("/existing", { recursive: true }); + fs.mkdirSync("/existing", { recursive: true }); // Should not throw + + expect(await fs.exists("/existing")).toBe(true); + }); + }); + + describe("sync methods work together in mock handlers", () => { + test("simulates git clone behavior", async () => { + const fs = new MemoryFileSystem(); + + // Simulate what a shell mock handler would do + const installLocation = "/test/marketplaces/my-repo"; + fs.mkdirSync(`${installLocation}/.claude-plugin`, { recursive: true }); + fs.writeFileSync( + `${installLocation}/.claude-plugin/marketplace.json`, + JSON.stringify({ name: "my-repo", plugins: [] }), + ); + + // Verify the structure was created correctly + expect(await fs.exists(installLocation)).toBe(true); + expect(await fs.exists(`${installLocation}/.claude-plugin`)).toBe(true); + const content = await fs.readFile( + `${installLocation}/.claude-plugin/marketplace.json`, + ); + expect(JSON.parse(content)).toEqual({ name: "my-repo", plugins: [] }); + }); + }); +}); diff --git a/src/adapters/memory-file-system.ts b/src/adapters/memory-file-system.ts new file mode 100644 index 0000000..e897d33 --- /dev/null +++ b/src/adapters/memory-file-system.ts @@ -0,0 +1,431 @@ +import { join, dirname } from 'node:path'; +import type { + FileSystem, + FileStat, + MkdirOptions, + RmOptions, + CpOptions, +} from '../interfaces/file-system'; + +/** + * File stat implementation for in-memory file system + */ +class MemoryStat implements FileStat { + constructor(private type: 'file' | 'directory') {} + + isDirectory(): boolean { + return this.type === 'directory'; + } + + isFile(): boolean { + return 
this.type === 'file';
+  }
+}
+
+/**
+ * In-memory file system implementation for testing
+ */
+export class MemoryFileSystem implements FileSystem {
+  private files: Map<string, string>;
+  private directories: Set<string>;
+
+  constructor() {
+    this.files = new Map();
+    this.directories = new Set();
+    // Always have root directory
+    this.directories.add('/');
+  }
+
+  /**
+   * Normalizes paths to absolute paths with forward slashes
+   */
+  private normalizePath(path: string): string {
+    // Ensure absolute path
+    const absolute = path.startsWith('/') ? path : `/${path}`;
+    // Normalize: remove duplicate slashes and trailing slashes (except root)
+    let normalized = absolute.replace(/\/+/g, '/');
+    if (normalized !== '/' && normalized.endsWith('/')) {
+      normalized = normalized.slice(0, -1);
+    }
+    return normalized;
+  }
+
+  /**
+   * Ensures all parent directories exist
+   */
+  private ensureParentDirectories(path: string): void {
+    const normalized = this.normalizePath(path);
+    let current = '';
+    const parts = normalized.split('/').filter((p) => p.length > 0);
+
+    for (const part of parts) {
+      current = current ?
join(current, part) : '/' + part;
+      this.directories.add(current);
+    }
+  }
+
+  async readFile(path: string): Promise<string> {
+    const normalized = this.normalizePath(path);
+
+    if (!this.files.has(normalized)) {
+      throw new Error(`ENOENT: no such file or directory, open '${path}'`);
+    }
+
+    const content = this.files.get(normalized);
+    return content!;
+  }
+
+  async writeFile(path: string, content: string): Promise<void> {
+    const normalized = this.normalizePath(path);
+
+    // Ensure parent directories exist
+    this.ensureParentDirectories(normalized);
+
+    this.files.set(normalized, content);
+  }
+
+  async exists(path: string): Promise<boolean> {
+    const normalized = this.normalizePath(path);
+    return this.files.has(normalized) || this.directories.has(normalized);
+  }
+
+  async mkdir(path: string, options?: MkdirOptions): Promise<void> {
+    const normalized = this.normalizePath(path);
+
+    if (this.directories.has(normalized)) {
+      // Directory already exists - not an error in most mkdir implementations
+      return;
+    }
+
+    if (this.files.has(normalized)) {
+      throw new Error(
+        `EEXIST: file already exists, mkdir '${path}'`
+      );
+    }
+
+    if (options?.recursive) {
+      // Create all parent directories
+      this.ensureParentDirectories(normalized);
+      this.directories.add(normalized);
+    } else {
+      // Check if parent exists
+      const parent = dirname(normalized);
+      if (parent !== '/' && !this.directories.has(parent)) {
+        throw new Error(
+          `ENOENT: no such file or directory, mkdir '${path}'`
+        );
+      }
+      this.directories.add(normalized);
+    }
+  }
+
+  async rm(path: string, options?: RmOptions): Promise<void> {
+    const normalized = this.normalizePath(path);
+
+    if (!this.files.has(normalized) && !this.directories.has(normalized)) {
+      if (options?.force) {
+        return; // Force means don't error on missing paths
+      }
+      throw new Error(`ENOENT: no such file or directory, rm '${path}'`);
+    }
+
+    if (this.directories.has(normalized)) {
+      // It's a directory
+      const contents = this.getDirectoryContents(normalized);
+
+      if
(contents.length > 0 && !options?.recursive) { + throw new Error( + `EISDIR: illegal operation on a directory, rm '${path}'` + ); + } + + if (options?.recursive) { + // Remove all children recursively + this.removeDirectoryRecursive(normalized); + } + + this.directories.delete(normalized); + } else { + // It's a file + this.files.delete(normalized); + } + } + + async readdir(path: string): Promise { + const normalized = this.normalizePath(path); + + if (!this.directories.has(normalized)) { + if (this.files.has(normalized)) { + throw new Error( + `ENOTDIR: not a directory, scandir '${path}'` + ); + } + throw new Error(`ENOENT: no such file or directory, scandir '${path}'`); + } + + return this.getDirectoryContents(normalized); + } + + async stat(path: string): Promise { + const normalized = this.normalizePath(path); + + if (this.files.has(normalized)) { + return new MemoryStat('file'); + } + + if (this.directories.has(normalized)) { + return new MemoryStat('directory'); + } + + throw new Error(`ENOENT: no such file or directory, stat '${path}'`); + } + + async cp(src: string, dest: string, options?: CpOptions): Promise { + const srcNorm = this.normalizePath(src); + const destNorm = this.normalizePath(dest); + + if (!this.files.has(srcNorm) && !this.directories.has(srcNorm)) { + throw new Error(`ENOENT: no such file or directory, cp '${src}'`); + } + + const destExists = await this.exists(destNorm); + if (destExists && !options?.force) { + throw new Error(`EEXIST: file already exists, cp '${dest}'`); + } + + if (this.files.has(srcNorm)) { + // Copy file + const content = this.files.get(srcNorm)!; + this.ensureParentDirectories(destNorm); + this.files.set(destNorm, content); + } else { + // Copy directory + if (!options?.recursive) { + throw new Error( + `EISDIR: illegal operation on a directory, cp '${src}'` + ); + } + + // Create destination directory + this.directories.add(destNorm); + this.ensureParentDirectories(destNorm); + + // Copy all files recursively + 
this.copyDirectoryRecursive(srcNorm, destNorm); + } + } + + /** + * Gets the contents of a directory (immediate children only) + */ + private getDirectoryContents(dirPath: string): string[] { + const contents = new Set(); + const dirNormalized = dirPath === '/' ? dirPath : dirPath; + const prefix = + dirNormalized === '/' ? '/' : dirNormalized + '/'; + + // Find all files that start with this directory + for (const filePath of this.files.keys()) { + if (filePath.startsWith(prefix)) { + const relative = filePath.slice(prefix.length); + // Only include immediate children (no nested paths) + if (relative.includes('/')) { + const firstPart = relative.split('/')[0]; + if (firstPart) contents.add(firstPart); + } else { + contents.add(relative); + } + } + } + + // Find all directories that start with this directory + for (const subDir of this.directories) { + if (subDir === dirNormalized) continue; // Skip self + + if (subDir.startsWith(prefix)) { + const relative = subDir.slice(prefix.length); + // Only include immediate children + if (relative.includes('/')) { + const firstPart = relative.split('/')[0]; + if (firstPart) contents.add(firstPart); + } else { + contents.add(relative); + } + } + } + + return Array.from(contents).sort(); + } + + /** + * Removes a directory and all its contents recursively + */ + private removeDirectoryRecursive(dirPath: string): void { + const prefix = + dirPath === '/' ? 
'/' : dirPath + '/'; + + // Remove all files under this directory + const filesToRemove: string[] = []; + for (const filePath of this.files.keys()) { + if (filePath.startsWith(prefix)) { + filesToRemove.push(filePath); + } + } + for (const filePath of filesToRemove) { + this.files.delete(filePath); + } + + // Remove all subdirectories + const dirsToRemove: string[] = []; + for (const subDir of this.directories) { + if (subDir !== dirPath && subDir.startsWith(prefix)) { + dirsToRemove.push(subDir); + } + } + for (const subDir of dirsToRemove) { + this.directories.delete(subDir); + } + } + + /** + * Copies a directory and all its contents recursively + */ + private copyDirectoryRecursive( + srcDir: string, + destDir: string + ): void { + const srcPrefix = srcDir === '/' ? '/' : srcDir + '/'; + const destPrefix = destDir === '/' ? '/' : destDir + '/'; + + // Copy all files + for (const filePath of this.files.keys()) { + if (filePath.startsWith(srcPrefix)) { + const relative = filePath.slice(srcPrefix.length); + const newPath = destPrefix + relative; + const content = this.files.get(filePath)!; + this.ensureParentDirectories(newPath); + this.files.set(newPath, content); + } + } + + // Copy all subdirectories + for (const subDir of this.directories) { + if (subDir !== srcDir && subDir.startsWith(srcPrefix)) { + const relative = subDir.slice(srcPrefix.length); + const newDir = destPrefix + relative; + this.directories.add(newDir); + } + } + } + + /** + * Synchronous write file for use in mock handlers + * The async version just wraps this - no actual async work is done + */ + writeFileSync(path: string, content: string): void { + const normalized = this.normalizePath(path); + this.ensureParentDirectories(normalized); + this.files.set(normalized, content); + } + + /** + * Synchronous mkdir for use in mock handlers + * The async version just wraps this - no actual async work is done + */ + mkdirSync(path: string, options?: MkdirOptions): void { + const normalized = 
this.normalizePath(path); + + if (this.directories.has(normalized)) { + return; + } + + if (this.files.has(normalized)) { + throw new Error(`EEXIST: file already exists, mkdir '${path}'`); + } + + if (options?.recursive) { + this.ensureParentDirectories(normalized); + this.directories.add(normalized); + } else { + const parent = normalized.substring(0, normalized.lastIndexOf("/")) || "/"; + if (!this.directories.has(parent)) { + throw new Error(`ENOENT: no such file or directory, mkdir '${path}'`); + } + this.directories.add(normalized); + } + } +} + +/** + * Builder class for fluent API + */ +class MemoryFileSystemBuilder { + private fs: MemoryFileSystem; + + constructor() { + this.fs = new MemoryFileSystem(); + } + + /** + * Adds a file to the file system + */ + withFile(path: string, content: string): this { + // Use non-async version directly since we're in the builder + const normalized = path.startsWith('/') ? path : `/${path}`; + const cleanPath = normalized.replace(/\/+/g, '/'); + + // Ensure parent directories + const parts = cleanPath.split('/').filter((p) => p.length > 0); + let current = ''; + for (const part of parts.slice(0, -1)) { + current = current ? `${current}/${part}` : `/${part}`; + this.fs['directories'].add(current); + } + + this.fs['files'].set(cleanPath, content); + return this; + } + + /** + * Adds a directory to the file system + */ + withDirectory(path: string): this { + const normalized = path.startsWith('/') ? path : `/${path}`; + let cleanPath = normalized.replace(/\/+/g, '/'); + if (cleanPath !== '/' && cleanPath.endsWith('/')) { + cleanPath = cleanPath.slice(0, -1); + } + + // Ensure parent directories + const parts = cleanPath.split('/').filter((p) => p.length > 0); + let current = ''; + for (const part of parts) { + current = current ? 
`${current}/${part}` : `/${part}`; + this.fs['directories'].add(current); + } + + return this; + } + + /** + * Builds and returns the MemoryFileSystem instance + */ + build(): MemoryFileSystem { + return this.fs; + } +} + +/** + * Factory function to create a MemoryFileSystem with fluent builder API + * + * @example + * ```typescript + * const fs = createMemoryFileSystem() + * .withFile('/path/to/file.txt', 'content') + * .withDirectory('/path/to/dir') + * .build(); + * ``` + */ +export function createMemoryFileSystem(): MemoryFileSystemBuilder { + return new MemoryFileSystemBuilder(); +} diff --git a/src/adapters/mock-process.ts b/src/adapters/mock-process.ts new file mode 100644 index 0000000..b42b6f6 --- /dev/null +++ b/src/adapters/mock-process.ts @@ -0,0 +1,78 @@ +import type { ProcessEnv } from '../interfaces/process'; + +/** + * Options for configuring a MockProcess instance + */ +export interface MockProcessOptions { + env?: Record; + cwd?: string; + homedir?: string; + pid?: number; +} + +/** + * Mock implementation of ProcessEnv for testing. + * Stores environment variables in a Map without touching the real process.env + */ +export class MockProcess implements ProcessEnv { + private envVars: Map; + private _cwd: string; + private _homedir: string; + private _pid: number; + + constructor(options: MockProcessOptions = {}) { + this.envVars = new Map(Object.entries(options.env ?? {})); + this._cwd = options.cwd ?? '/'; + this._homedir = options.homedir ?? '/home/user'; + this._pid = options.pid ?? 
1000; + } + + /** + * Gets an environment variable value + */ + get(key: string): string | undefined { + return this.envVars.get(key); + } + + /** + * Sets an environment variable + */ + set(key: string, value: string): void { + this.envVars.set(key, value); + } + + /** + * Deletes an environment variable + */ + delete(key: string): void { + this.envVars.delete(key); + } + + /** + * Gets the current working directory + */ + cwd(): string { + return this._cwd; + } + + /** + * Gets the user's home directory + */ + homedir(): string { + return this._homedir; + } + + /** + * Gets the process ID + */ + pid(): number { + return this._pid; + } +} + +/** + * Factory function to create a MockProcess instance + */ +export function createMockProcess(options?: MockProcessOptions): MockProcess { + return new MockProcess(options); +} diff --git a/src/adapters/mock-shell.ts b/src/adapters/mock-shell.ts new file mode 100644 index 0000000..d1affe0 --- /dev/null +++ b/src/adapters/mock-shell.ts @@ -0,0 +1,73 @@ +import type { Shell, SpawnOptions, SpawnSyncResult } from "../interfaces/shell"; + +/** + * Handler function for mock shell + * Receives command and options, returns the result + */ +export type ShellHandler = ( + cmd: string[], + options?: SpawnOptions +) => SpawnSyncResult; + +/** + * Tracks a call to the mock shell + */ +interface CommandCall { + cmd: string[]; + options?: SpawnOptions; +} + +/** + * Mock implementation of Shell for testing + */ +export class MockShell implements Shell { + private handler: ShellHandler; + private commandCalls: CommandCall[] = []; + + constructor(handler?: ShellHandler) { + this.handler = + handler || + (() => ({ + exitCode: 0, + stdout: new Uint8Array(), + stderr: new Uint8Array(), + })); + } + + /** + * Spawns a command synchronously + * Tracks the call for assertions and delegates to the handler + */ + spawnSync(cmd: string[], options?: SpawnOptions): SpawnSyncResult { + this.commandCalls.push({ cmd, options }); + return 
this.handler(cmd, options); + } + + /** + * Get all commands that were called + */ + get calls(): CommandCall[] { + return this.commandCalls; + } + + /** + * Change the handler mid-test + */ + setHandler(handler: ShellHandler): void { + this.handler = handler; + } + + /** + * Reset the call history + */ + reset(): void { + this.commandCalls = []; + } +} + +/** + * Factory function to create a MockShell instance + */ +export function createMockShell(handler?: ShellHandler): MockShell { + return new MockShell(handler); +} diff --git a/src/adapters/node-process.ts b/src/adapters/node-process.ts new file mode 100644 index 0000000..70b8d2d --- /dev/null +++ b/src/adapters/node-process.ts @@ -0,0 +1,57 @@ +import * as os from "os"; +import type { ProcessEnv } from "../interfaces/process"; + +/** + * Node.js/Bun implementation of the ProcessEnv interface. + * Provides access to environment variables and system information + * using Node.js built-in APIs. + */ +export class NodeProcess implements ProcessEnv { + /** + * Gets an environment variable value + */ + get(key: string): string | undefined { + return process.env[key]; + } + + /** + * Sets an environment variable + */ + set(key: string, value: string): void { + process.env[key] = value; + } + + /** + * Deletes an environment variable + */ + delete(key: string): void { + delete process.env[key]; + } + + /** + * Gets the current working directory + */ + cwd(): string { + return process.cwd(); + } + + /** + * Gets the user's home directory + */ + homedir(): string { + return os.homedir(); + } + + /** + * Gets the process ID + */ + pid(): number { + return process.pid; + } +} + +/** + * Singleton instance of NodeProcess for convenience. + * Use this as the default implementation when you need a ProcessEnv instance. 
+ */ +export const nodeProcess = new NodeProcess(); diff --git a/src/cache.test.ts b/src/cache.integration.test.ts similarity index 100% rename from src/cache.test.ts rename to src/cache.integration.test.ts diff --git a/src/cache.ts b/src/cache.ts index f444b59..ff665c8 100644 --- a/src/cache.ts +++ b/src/cache.ts @@ -1,52 +1,42 @@ import { join } from "node:path"; -import { homedir } from "node:os"; -import { mkdirSync, rmSync, existsSync, readdirSync, readFileSync, writeFileSync } from "node:fs"; -import { cp } from "node:fs/promises"; +// Sync fs imports used by backward-compatible functions (initCache, cleanupCache) +// that predate the async FileSystem interface. These functions are kept for +// existing consumers but new code should use createCache() instead. +import { mkdirSync, rmSync, existsSync } from "node:fs"; import type { PluginInfo } from "./scanner"; +import type { FileSystem } from "./interfaces/file-system"; +import type { ProcessEnv } from "./interfaces/process"; +import { bunFileSystem } from "./adapters/bun-file-system"; +import { nodeProcess } from "./adapters/node-process"; import { expandEnvVariables, expandEnvInObject } from "./env-expansion"; -let instanceId: string; -let cacheDir: string; +/** + * Dependencies for cache operations + */ +export interface CacheDependencies { + fs?: FileSystem; + process?: ProcessEnv; +} + +/** + * Cache instance interface with encapsulated state + */ +export interface CacheInstance { + readonly cacheDir: string; + getCachedPlugin(plugin: PluginInfo): Promise; + cleanup(): void; +} /** * Gets the cache root directory. 
* Prefers $XDG_CACHE_HOME, falls back to ~/.cache */ -function getCacheRoot(): string { - const xdgCacheHome = process.env.XDG_CACHE_HOME; +function getCacheRoot(proc: ProcessEnv): string { + const xdgCacheHome = proc.get("XDG_CACHE_HOME"); if (xdgCacheHome) { return join(xdgCacheHome, "construct", "plugins"); } - return join(homedir(), ".cache", "construct", "plugins"); -} - -/** - * Initializes cache for this construct instance. - * Generates unique instance ID and registers cleanup handlers. - * @returns Instance cache directory path - */ -export function initCache(): string { - // Generate unique instance ID: pid + timestamp - instanceId = `${process.pid}-${Date.now()}`; - - // Create cache directory - const cacheRoot = getCacheRoot(); - cacheDir = join(cacheRoot, instanceId); - - mkdirSync(cacheDir, { recursive: true }); - - // Register cleanup handlers - process.on("exit", cleanupCache); - process.on("SIGINT", () => { - cleanupCache(); - process.exit(130); // Standard exit code for SIGINT - }); - process.on("SIGTERM", () => { - cleanupCache(); - process.exit(143); // Standard exit code for SIGTERM - }); - - return cacheDir; + return join(proc.homedir(), ".cache", "construct", "plugins"); } /** @@ -106,20 +96,23 @@ function reconstructMarkdown( /** * Expands environment variables in .mcp.json file. 
*/ -async function expandMcpJson(filePath: string, localEnv: Record): Promise { +async function expandMcpJson( + fs: FileSystem, + filePath: string, + localEnv: Record +): Promise { try { - const file = Bun.file(filePath); - if (!(await file.exists())) { + if (!(await fs.exists(filePath))) { return; } - const content = await file.text(); + const content = await fs.readFile(filePath); const parsed = JSON.parse(content); // Recursively expand all string values in the JSON object const expanded = expandEnvInObject(parsed, localEnv); - await Bun.write(filePath, JSON.stringify(expanded, null, 2)); + await fs.writeFile(filePath, JSON.stringify(expanded, null, 2)); } catch (error) { // Skip if file doesn't exist or can't be parsed } @@ -128,13 +121,17 @@ async function expandMcpJson(filePath: string, localEnv: Record) /** * Expands environment variables in markdown frontmatter. */ -function expandMarkdownFrontmatter(filePath: string, localEnv: Record): void { +async function expandMarkdownFrontmatter( + fs: FileSystem, + filePath: string, + localEnv: Record +): Promise { try { - if (!existsSync(filePath)) { + if (!(await fs.exists(filePath))) { return; } - const content = readFileSync(filePath, "utf-8"); + const content = await fs.readFile(filePath); const { frontmatter, body, hasFrontmatter } = parseFrontmatter(content); if (!hasFrontmatter || frontmatter === null) { @@ -149,7 +146,7 @@ function expandMarkdownFrontmatter(filePath: string, localEnv: Record ): Promise { // Expand .mcp.json const mcpJsonPath = join(cachedPath, ".mcp.json"); - await expandMcpJson(mcpJsonPath, localEnv); + await expandMcpJson(fs, mcpJsonPath, localEnv); // Expand agents/*.md frontmatter const agentsDir = join(cachedPath, "agents"); - if (existsSync(agentsDir)) { + if (await fs.exists(agentsDir)) { try { - const agents = readdirSync(agentsDir); + const agents = await fs.readdir(agentsDir); for (const agent of agents) { if (agent.endsWith(".md")) { const agentPath = join(agentsDir, agent); - 
expandMarkdownFrontmatter(agentPath, localEnv); + await expandMarkdownFrontmatter(fs, agentPath, localEnv); } } } catch (error) { @@ -184,12 +182,12 @@ async function expandCachedPluginFiles( // Expand skills/*/SKILL.md frontmatter const skillsDir = join(cachedPath, "skills"); - if (existsSync(skillsDir)) { + if (await fs.exists(skillsDir)) { try { - const skillDirs = readdirSync(skillsDir); + const skillDirs = await fs.readdir(skillsDir); for (const skillDir of skillDirs) { const skillPath = join(skillsDir, skillDir, "SKILL.md"); - expandMarkdownFrontmatter(skillPath, localEnv); + await expandMarkdownFrontmatter(fs, skillPath, localEnv); } } catch (error) { // Skip if skills directory can't be read @@ -197,13 +195,127 @@ async function expandCachedPluginFiles( } } +/** + * Creates a new cache instance with encapsulated state. + * @param deps Optional dependencies for file system and process + * @returns CacheInstance with methods to manage plugin caching + */ +export function createCache(deps?: CacheDependencies): CacheInstance { + const fs = deps?.fs ?? bunFileSystem; + const proc = deps?.process ?? 
nodeProcess; + + // Generate unique instance ID: pid + timestamp + const instanceId = `${proc.pid()}-${Date.now()}`; + + // Create cache directory path + const cacheRoot = getCacheRoot(proc); + const cacheDir = join(cacheRoot, instanceId); + + // Create cache directory synchronously during initialization + // We use a self-executing async function stored in a promise + let initialized = false; + const initPromise = fs.mkdir(cacheDir, { recursive: true }).then(() => { + initialized = true; + }); + + async function ensureInitialized(): Promise { + if (!initialized) { + await initPromise; + } + } + + return { + get cacheDir(): string { + return cacheDir; + }, + + async getCachedPlugin(plugin: PluginInfo): Promise { + await ensureInitialized(); + + // Parse plugin name to get marketplace and plugin name + // Format: "plugin-name@marketplace" + const [pluginName, marketplace] = plugin.name.split("@"); + + // Ensure we have valid parts + if (!pluginName || !marketplace) { + throw new Error( + `Invalid plugin name format: ${plugin.name}. 
Expected "plugin-name@marketplace"` + ); + } + + // Create cache structure: /// + const marketplaceDir = join(cacheDir, marketplace); + const cachedPluginDir = join(marketplaceDir, pluginName); + + // Copy plugin from install path to cache + await fs.mkdir(cachedPluginDir, { recursive: true }); + await fs.cp(plugin.installPath, cachedPluginDir, { + recursive: true, + force: true, + }); + + // Set CLAUDE_PLUGIN_ROOT to the cached path + const localEnv = { CLAUDE_PLUGIN_ROOT: cachedPluginDir }; + + // Expand environment variables in cached files + await expandCachedPluginFiles(fs, cachedPluginDir, localEnv); + + return cachedPluginDir; + }, + + cleanup(): void { + // Use fire-and-forget pattern for cleanup + fs.rm(cacheDir, { recursive: true, force: true }).catch(() => { + // Silently ignore cleanup errors + }); + }, + }; +} + +// ============================================================================ +// Backward compatible functions using default cache instance +// ============================================================================ + +// Module-level state for backward compatibility +let defaultCacheDir: string | null = null; + +/** + * Initializes cache for this construct instance. + * Generates unique instance ID and registers cleanup handlers. 
+ * @returns Instance cache directory path + */ +export function initCache(): string { + // Generate unique instance ID: pid + timestamp + const instanceId = `${process.pid}-${Date.now()}`; + + // Create cache directory path + const cacheRoot = getCacheRoot(nodeProcess); + defaultCacheDir = join(cacheRoot, instanceId); + + // Create directory synchronously for backward compatibility + mkdirSync(defaultCacheDir, { recursive: true }); + + // Register cleanup handlers + process.on("exit", cleanupCache); + process.on("SIGINT", () => { + cleanupCache(); + process.exit(130); // Standard exit code for SIGINT + }); + process.on("SIGTERM", () => { + cleanupCache(); + process.exit(143); // Standard exit code for SIGTERM + }); + + return defaultCacheDir; +} + /** * Gets or creates a cached copy of a plugin with expanded env vars. * CLAUDE_PLUGIN_ROOT is set to the destination cache path during expansion. * @returns Path to cached plugin directory */ export async function getCachedPlugin(plugin: PluginInfo): Promise { - if (!cacheDir) { + if (!defaultCacheDir) { throw new Error("Cache not initialized. Call initCache() first."); } @@ -213,22 +325,27 @@ export async function getCachedPlugin(plugin: PluginInfo): Promise { // Ensure we have valid parts if (!pluginName || !marketplace) { - throw new Error(`Invalid plugin name format: ${plugin.name}. Expected "plugin-name@marketplace"`); + throw new Error( + `Invalid plugin name format: ${plugin.name}. 
Expected "plugin-name@marketplace"` + ); } // Create cache structure: /// - const marketplaceDir = join(cacheDir, marketplace); + const marketplaceDir = join(defaultCacheDir, marketplace); const cachedPluginDir = join(marketplaceDir, pluginName); // Copy plugin from install path to cache - mkdirSync(cachedPluginDir, { recursive: true }); - await cp(plugin.installPath, cachedPluginDir, { recursive: true, force: true }); + await bunFileSystem.mkdir(cachedPluginDir, { recursive: true }); + await bunFileSystem.cp(plugin.installPath, cachedPluginDir, { + recursive: true, + force: true, + }); // Set CLAUDE_PLUGIN_ROOT to the cached path const localEnv = { CLAUDE_PLUGIN_ROOT: cachedPluginDir }; // Expand environment variables in cached files - await expandCachedPluginFiles(cachedPluginDir, localEnv); + await expandCachedPluginFiles(bunFileSystem, cachedPluginDir, localEnv); return cachedPluginDir; } @@ -238,9 +355,9 @@ export async function getCachedPlugin(plugin: PluginInfo): Promise { * Called automatically on process exit. */ export function cleanupCache(): void { - if (cacheDir && existsSync(cacheDir)) { + if (defaultCacheDir && existsSync(defaultCacheDir)) { try { - rmSync(cacheDir, { recursive: true, force: true }); + rmSync(defaultCacheDir, { recursive: true, force: true }); } catch (error) { // Silently ignore cleanup errors } @@ -251,12 +368,14 @@ export function cleanupCache(): void { * Clears ALL cached instances (for --clear-cache command). * Useful for cleaning up orphaned caches from crashed processes. */ -export async function clearAllCaches(): Promise { - const cacheRoot = getCacheRoot(); +export async function clearAllCaches(deps?: CacheDependencies): Promise { + const fs = deps?.fs ?? bunFileSystem; + const proc = deps?.process ?? 
nodeProcess; + const cacheRoot = getCacheRoot(proc); - if (existsSync(cacheRoot)) { + if (await fs.exists(cacheRoot)) { try { - rmSync(cacheRoot, { recursive: true, force: true }); + await fs.rm(cacheRoot, { recursive: true, force: true }); } catch (error) { // Silently ignore errors } diff --git a/src/cache.unit.test.ts b/src/cache.unit.test.ts new file mode 100644 index 0000000..19aa6f8 --- /dev/null +++ b/src/cache.unit.test.ts @@ -0,0 +1,388 @@ +import { describe, expect, test } from "bun:test"; +import { + createCache, + clearAllCaches, + type CacheDependencies, + type CacheInstance, +} from "./cache"; +import { + createMemoryFileSystem, + MemoryFileSystem, +} from "./adapters/memory-file-system"; +import { createMockProcess, MockProcess } from "./adapters/mock-process"; +import type { PluginInfo } from "./scanner"; + +/** + * Helper to create test dependencies + */ +function createTestDeps(options?: { + env?: Record; + pid?: number; + homedir?: string; +}): { fs: MemoryFileSystem; process: MockProcess; deps: CacheDependencies } { + const fs = createMemoryFileSystem().build(); + const mockProcess = createMockProcess({ + env: options?.env ?? {}, + pid: options?.pid ?? 1234, + homedir: options?.homedir ?? 
"/home/testuser", + }); + return { fs, process: mockProcess, deps: { fs, process: mockProcess } }; +} + +/** + * Helper to create a mock plugin in the memory file system + */ +async function createMockPlugin( + fs: MemoryFileSystem, + installPath: string, + options?: { + mcpJson?: Record; + agents?: Array<{ name: string; content: string }>; + skills?: Array<{ name: string; content: string }>; + } +): Promise { + await fs.mkdir(installPath, { recursive: true }); + + // Create .mcp.json + if (options?.mcpJson) { + await fs.writeFile( + `${installPath}/.mcp.json`, + JSON.stringify(options.mcpJson, null, 2) + ); + } + + // Create agents + if (options?.agents) { + const agentsDir = `${installPath}/agents`; + await fs.mkdir(agentsDir, { recursive: true }); + for (const agent of options.agents) { + await fs.writeFile(`${agentsDir}/${agent.name}`, agent.content); + } + } + + // Create skills + if (options?.skills) { + for (const skill of options.skills) { + const skillDir = `${installPath}/skills/${skill.name}`; + await fs.mkdir(skillDir, { recursive: true }); + await fs.writeFile(`${skillDir}/SKILL.md`, skill.content); + } + } + + return { + name: "test-plugin@test-marketplace", + installPath, + version: "1.0.0", + components: [], + }; +} + +describe("createCache", () => { + test("creates instance directory with pid and timestamp", async () => { + const { fs, deps } = createTestDeps({ pid: 5678 }); + + const cache = createCache(deps); + + // cacheDir should contain the PID + expect(cache.cacheDir).toContain("5678"); + + // Wait for async mkdir to complete + await new Promise((resolve) => setTimeout(resolve, 10)); + + // Directory should exist + expect(await fs.exists(cache.cacheDir)).toBe(true); + }); + + test("uses XDG_CACHE_HOME when set", async () => { + const { deps } = createTestDeps({ + env: { XDG_CACHE_HOME: "/custom/cache" }, + pid: 1111, + }); + + const cache = createCache(deps); + + expect(cache.cacheDir).toContain("/custom/cache/construct/plugins"); + }); + + 
test("uses homedir/.cache when XDG_CACHE_HOME not set", async () => { + const { deps } = createTestDeps({ + homedir: "/home/alice", + pid: 2222, + }); + + const cache = createCache(deps); + + expect(cache.cacheDir).toContain("/home/alice/.cache/construct/plugins"); + }); +}); + +describe("getCachedPlugin", () => { + test("copies plugin files to cache directory", async () => { + const { fs, deps } = createTestDeps({ pid: 3333 }); + + // Create source plugin + const plugin = await createMockPlugin(fs, "/plugins/source", { + mcpJson: { name: "test" }, + }); + + const cache = createCache(deps); + const cachedPath = await cache.getCachedPlugin(plugin); + + // Verify cached files exist + expect(await fs.exists(cachedPath)).toBe(true); + expect(await fs.exists(`${cachedPath}/.mcp.json`)).toBe(true); + + // Verify it's in the cache directory structure + expect(cachedPath).toContain(cache.cacheDir); + expect(cachedPath).toContain("test-marketplace"); + expect(cachedPath).toContain("test-plugin"); + }); + + test("copies agents and skills correctly", async () => { + const { fs, deps } = createTestDeps({ pid: 4444 }); + + const plugin = await createMockPlugin(fs, "/plugins/full", { + agents: [ + { + name: "agent1.md", + content: "---\nname: Agent 1\n---\nBody", + }, + ], + skills: [ + { + name: "skill1", + content: "---\nname: Skill 1\n---\nBody", + }, + ], + }); + + const cache = createCache(deps); + const cachedPath = await cache.getCachedPlugin(plugin); + + expect(await fs.exists(`${cachedPath}/agents/agent1.md`)).toBe(true); + expect(await fs.exists(`${cachedPath}/skills/skill1/SKILL.md`)).toBe(true); + }); + + test("expands CLAUDE_PLUGIN_ROOT in .mcp.json", async () => { + const { fs, deps } = createTestDeps({ pid: 5555 }); + + const plugin = await createMockPlugin(fs, "/plugins/expand-test", { + mcpJson: { + cwd: "${CLAUDE_PLUGIN_ROOT}", + endpoint: "file://${CLAUDE_PLUGIN_ROOT}/server.js", + }, + }); + + const cache = createCache(deps); + const cachedPath = await 
cache.getCachedPlugin(plugin); + + const mcpContent = await fs.readFile(`${cachedPath}/.mcp.json`); + const mcpData = JSON.parse(mcpContent); + + // CLAUDE_PLUGIN_ROOT should be expanded to cached path + expect(mcpData.cwd).toBe(cachedPath); + expect(mcpData.endpoint).toBe(`file://${cachedPath}/server.js`); + expect(mcpData.cwd).not.toContain("${CLAUDE_PLUGIN_ROOT}"); + }); + + test("expands environment variables in agent frontmatter", async () => { + const { fs, deps } = createTestDeps({ pid: 6666 }); + + const plugin = await createMockPlugin(fs, "/plugins/agent-expand", { + agents: [ + { + name: "test-agent.md", + content: `--- +name: Test Agent +path: \${CLAUDE_PLUGIN_ROOT}/bin/agent +--- + +This body should remain unchanged.`, + }, + ], + }); + + const cache = createCache(deps); + const cachedPath = await cache.getCachedPlugin(plugin); + + const agentContent = await fs.readFile(`${cachedPath}/agents/test-agent.md`); + + // Frontmatter should have expanded path + expect(agentContent).toContain(`path: ${cachedPath}/bin/agent`); + expect(agentContent).not.toContain("${CLAUDE_PLUGIN_ROOT}"); + // Body should be unchanged + expect(agentContent).toContain("This body should remain unchanged."); + }); + + test("expands environment variables in skill frontmatter", async () => { + const { fs, deps } = createTestDeps({ pid: 7777 }); + + const plugin = await createMockPlugin(fs, "/plugins/skill-expand", { + skills: [ + { + name: "my-skill", + content: `--- +name: My Skill +root: \${CLAUDE_PLUGIN_ROOT}/skills/my-skill +--- + +Skill body here.`, + }, + ], + }); + + const cache = createCache(deps); + const cachedPath = await cache.getCachedPlugin(plugin); + + const skillContent = await fs.readFile( + `${cachedPath}/skills/my-skill/SKILL.md` + ); + + expect(skillContent).toContain(`root: ${cachedPath}/skills/my-skill`); + expect(skillContent).not.toContain("${CLAUDE_PLUGIN_ROOT}"); + expect(skillContent).toContain("Skill body here."); + }); + + test("throws error for invalid 
plugin name format", async () => { + const { fs, deps } = createTestDeps({ pid: 8888 }); + + await fs.mkdir("/plugins/invalid", { recursive: true }); + const invalidPlugin: PluginInfo = { + name: "invalid-name-no-at-symbol", + installPath: "/plugins/invalid", + version: "1.0.0", + components: [], + }; + + const cache = createCache(deps); + + expect(cache.getCachedPlugin(invalidPlugin)).rejects.toThrow( + 'Invalid plugin name format' + ); + }); +}); + +describe("cleanup", () => { + test("removes instance cache directory", async () => { + const { fs, deps } = createTestDeps({ pid: 9999 }); + + const cache = createCache(deps); + + // Wait for directory to be created + await new Promise((resolve) => setTimeout(resolve, 10)); + expect(await fs.exists(cache.cacheDir)).toBe(true); + + cache.cleanup(); + + // Wait for async rm to complete + await new Promise((resolve) => setTimeout(resolve, 10)); + expect(await fs.exists(cache.cacheDir)).toBe(false); + }); + + test("cleanup is idempotent (no error when called multiple times)", async () => { + const { deps } = createTestDeps({ pid: 1010 }); + + const cache = createCache(deps); + await new Promise((resolve) => setTimeout(resolve, 10)); + + // Should not throw + cache.cleanup(); + await new Promise((resolve) => setTimeout(resolve, 10)); + cache.cleanup(); + await new Promise((resolve) => setTimeout(resolve, 10)); + }); +}); + +describe("clearAllCaches", () => { + test("removes all cache directories", async () => { + const { fs, deps } = createTestDeps({ + pid: 1111, + homedir: "/home/clear-test", + }); + + // Create multiple cache instances + const cache1 = createCache(deps); + const cache2 = createCache(deps); + + // Wait for directories to be created + await new Promise((resolve) => setTimeout(resolve, 10)); + + const cacheRoot = "/home/clear-test/.cache/construct/plugins"; + expect(await fs.exists(cache1.cacheDir)).toBe(true); + expect(await fs.exists(cache2.cacheDir)).toBe(true); + + await clearAllCaches(deps); + + // 
Root cache directory should be removed + expect(await fs.exists(cacheRoot)).toBe(false); + }); + + test("handles non-existent cache directory gracefully", async () => { + const { deps } = createTestDeps({ homedir: "/home/empty" }); + + // Should not throw even if cache doesn't exist + await clearAllCaches(deps); + }); + + test("uses XDG_CACHE_HOME when clearing", async () => { + const { fs, deps } = createTestDeps({ + env: { XDG_CACHE_HOME: "/xdg/cache" }, + pid: 1212, + }); + + const cache = createCache(deps); + await new Promise((resolve) => setTimeout(resolve, 10)); + + expect(cache.cacheDir).toContain("/xdg/cache"); + + await clearAllCaches(deps); + + expect(await fs.exists("/xdg/cache/construct/plugins")).toBe(false); + }); +}); + +describe("environment variable expansion", () => { + test("expands default values in .mcp.json", async () => { + const { fs, deps } = createTestDeps({ pid: 1313 }); + + const plugin = await createMockPlugin(fs, "/plugins/defaults", { + mcpJson: { + port: "${PORT:-3000}", + host: "${HOST:-localhost}", + }, + }); + + const cache = createCache(deps); + const cachedPath = await cache.getCachedPlugin(plugin); + + const mcpContent = await fs.readFile(`${cachedPath}/.mcp.json`); + const mcpData = JSON.parse(mcpContent); + + expect(mcpData.port).toBe("3000"); + expect(mcpData.host).toBe("localhost"); + }); + + test("handles nested objects in .mcp.json", async () => { + const { fs, deps } = createTestDeps({ pid: 1414 }); + + const plugin = await createMockPlugin(fs, "/plugins/nested", { + mcpJson: { + server: { + path: "${CLAUDE_PLUGIN_ROOT}/bin/server", + config: { + dir: "${CLAUDE_PLUGIN_ROOT}/config", + }, + }, + }, + }); + + const cache = createCache(deps); + const cachedPath = await cache.getCachedPlugin(plugin); + + const mcpContent = await fs.readFile(`${cachedPath}/.mcp.json`); + const mcpData = JSON.parse(mcpContent); + + expect(mcpData.server.path).toBe(`${cachedPath}/bin/server`); + 
expect(mcpData.server.config.dir).toBe(`${cachedPath}/config`); + }); +}); diff --git a/src/config.test.ts b/src/config.test.ts new file mode 100644 index 0000000..3cdef7f --- /dev/null +++ b/src/config.test.ts @@ -0,0 +1,206 @@ +import { describe, expect, it } from "bun:test"; +import { loadConfig, saveConfig, mergeCliWithConfig } from "./config"; +import type { ConfigDependencies } from "./config"; +import { createMemoryFileSystem } from "./adapters/memory-file-system"; +import { createMockProcess } from "./adapters/mock-process"; + +function createTestDeps( + files: Record = {}, + cwd = "/project" +): ConfigDependencies { + const builder = createMemoryFileSystem(); + for (const [path, content] of Object.entries(files)) { + builder.withFile(path, content); + } + return { + fs: builder.build(), + process: createMockProcess({ cwd }), + }; +} + +describe("loadConfig", () => { + it("returns null when file doesn't exist", async () => { + const deps = createTestDeps({}, "/project"); + const result = await loadConfig(deps); + expect(result).toBeNull(); + }); + + it("returns parsed config when valid", async () => { + const config = { + enabledPlugins: ["plugin1@marketplace", "plugin2@marketplace"], + lastUsed: "2024-01-15T10:30:00Z", + }; + const deps = createTestDeps( + { "/project/.construct.json": JSON.stringify(config) }, + "/project" + ); + + const result = await loadConfig(deps); + + expect(result).toEqual(config); + }); + + it("returns null when invalid JSON", async () => { + const deps = createTestDeps( + { "/project/.construct.json": "{ invalid json }" }, + "/project" + ); + + const result = await loadConfig(deps); + + expect(result).toBeNull(); + }); + + it("returns null when config structure is invalid", async () => { + const deps = createTestDeps( + { "/project/.construct.json": JSON.stringify({ foo: "bar" }) }, + "/project" + ); + + const result = await loadConfig(deps); + + expect(result).toBeNull(); + }); + + it("returns null when enabledPlugins is not an 
array", async () => { + const deps = createTestDeps( + { + "/project/.construct.json": JSON.stringify({ + enabledPlugins: "not-an-array", + lastUsed: "2024-01-15", + }), + }, + "/project" + ); + + const result = await loadConfig(deps); + + expect(result).toBeNull(); + }); + + it("returns null when lastUsed is not a string", async () => { + const deps = createTestDeps( + { + "/project/.construct.json": JSON.stringify({ + enabledPlugins: [], + lastUsed: 12345, + }), + }, + "/project" + ); + + const result = await loadConfig(deps); + + expect(result).toBeNull(); + }); +}); + +describe("saveConfig", () => { + it("writes correct JSON to file", async () => { + const fs = createMemoryFileSystem().withDirectory("/project").build(); + const process = createMockProcess({ cwd: "/project" }); + const deps: ConfigDependencies = { fs, process }; + + const config = { + enabledPlugins: ["plugin1@marketplace"], + lastUsed: "2024-01-15T10:30:00Z", + }; + + await saveConfig(config, deps); + + const content = await fs.readFile("/project/.construct.json"); + expect(JSON.parse(content)).toEqual(config); + }); + + it("formats JSON with 2-space indentation", async () => { + const fs = createMemoryFileSystem().withDirectory("/project").build(); + const process = createMockProcess({ cwd: "/project" }); + const deps: ConfigDependencies = { fs, process }; + + const config = { + enabledPlugins: ["plugin1@marketplace"], + lastUsed: "2024-01-15T10:30:00Z", + }; + + await saveConfig(config, deps); + + const content = await fs.readFile("/project/.construct.json"); + expect(content).toBe(JSON.stringify(config, null, 2)); + }); + + it("overwrites existing config", async () => { + const fs = createMemoryFileSystem() + .withFile( + "/project/.construct.json", + JSON.stringify({ enabledPlugins: ["old"], lastUsed: "old" }) + ) + .build(); + const process = createMockProcess({ cwd: "/project" }); + const deps: ConfigDependencies = { fs, process }; + + const newConfig = { + enabledPlugins: 
["new@marketplace"], + lastUsed: "2024-01-16T10:30:00Z", + }; + + await saveConfig(newConfig, deps); + + const content = await fs.readFile("/project/.construct.json"); + expect(JSON.parse(content)).toEqual(newConfig); + }); +}); + +describe("mergeCliWithConfig", () => { + it("returns CLI plugins when provided", () => { + const cliPlugins = ["cli-plugin@marketplace"]; + const savedConfig = { + enabledPlugins: ["saved-plugin@marketplace"], + lastUsed: "2024-01-15", + }; + + const result = mergeCliWithConfig(cliPlugins, savedConfig); + + expect(result).toEqual(cliPlugins); + }); + + it("returns saved config plugins when CLI plugins empty", () => { + const cliPlugins: string[] = []; + const savedConfig = { + enabledPlugins: ["saved-plugin@marketplace"], + lastUsed: "2024-01-15", + }; + + const result = mergeCliWithConfig(cliPlugins, savedConfig); + + expect(result).toEqual(savedConfig.enabledPlugins); + }); + + it("returns empty array when no CLI plugins and no saved config", () => { + const result = mergeCliWithConfig([], null); + + expect(result).toEqual([]); + }); + + it("returns empty array when no CLI plugins and saved config has no plugins", () => { + const savedConfig = { + enabledPlugins: [], + lastUsed: "2024-01-15", + }; + + const result = mergeCliWithConfig([], savedConfig); + + expect(result).toEqual([]); + }); + + it("CLI plugins take precedence over saved config", () => { + const cliPlugins = ["a@m", "b@m"]; + const savedConfig = { + enabledPlugins: ["c@m", "d@m"], + lastUsed: "2024-01-15", + }; + + const result = mergeCliWithConfig(cliPlugins, savedConfig); + + expect(result).toEqual(["a@m", "b@m"]); + }); +}); diff --git a/src/config.ts b/src/config.ts index 60f2ca7..bcf532a 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,4 +1,8 @@ import { join } from "path"; +import type { FileSystem } from "./interfaces/file-system"; +import type { ProcessEnv } from "./interfaces/process"; +import { bunFileSystem } from "./adapters/bun-file-system"; +import { 
nodeProcess } from "./adapters/node-process"; const CONFIG_FILE = ".construct.json"; @@ -7,21 +11,38 @@ export interface ConstructConfig { lastUsed: string; } +/** + * Dependencies for config operations, allowing injection for testing. + */ +export interface ConfigDependencies { + fs?: FileSystem; + process?: ProcessEnv; +} + +const defaultDeps: Required = { + fs: bunFileSystem, + process: nodeProcess, +}; + /** * Load configuration from .construct.json in the current directory. * Returns null if the file doesn't exist or is invalid. */ -export async function loadConfig(): Promise { +export async function loadConfig( + deps?: ConfigDependencies +): Promise { + const { fs, process } = { ...defaultDeps, ...deps }; + try { const configPath = join(process.cwd(), CONFIG_FILE); - const file = Bun.file(configPath); - - if (!(await file.exists())) { + + if (!(await fs.exists(configPath))) { return null; } - - const config = await file.json(); - + + const content = await fs.readFile(configPath); + const config = JSON.parse(content); + // Validate config structure if ( typeof config === "object" && @@ -31,7 +52,7 @@ export async function loadConfig(): Promise { ) { return config as ConstructConfig; } - + return null; } catch (error) { // Handle parsing errors or file system errors gracefully @@ -42,9 +63,13 @@ export async function loadConfig(): Promise { /** * Save configuration to .construct.json in the current directory. 
*/ -export async function saveConfig(config: ConstructConfig): Promise { +export async function saveConfig( + config: ConstructConfig, + deps?: ConfigDependencies +): Promise { + const { fs, process } = { ...defaultDeps, ...deps }; const configPath = join(process.cwd(), CONFIG_FILE); - await Bun.write(configPath, JSON.stringify(config, null, 2)); + await fs.writeFile(configPath, JSON.stringify(config, null, 2)); } /** diff --git a/src/executor.ts b/src/executor.ts index 52b2445..fb2ebe9 100644 --- a/src/executor.ts +++ b/src/executor.ts @@ -1,4 +1,6 @@ import type { TranslatedAgent } from './agent-translator'; +import type { Shell } from './interfaces/shell'; +import { bunShell } from './adapters/bun-shell'; import { unlinkSync } from 'node:fs'; export interface ExecutorOptions { @@ -8,6 +10,11 @@ export interface ExecutorOptions { translatedAgents: TranslatedAgent[]; } +export interface ExecutorDependencies { + shell?: Shell; + env?: Record; +} + function setupCleanup(translatedAgents: TranslatedAgent[]) { let cleanedUp = false; @@ -35,10 +42,14 @@ function setupCleanup(translatedAgents: TranslatedAgent[]) { }); } -export function executeCopilot(options: ExecutorOptions): number { +export function executeCopilot(options: ExecutorOptions, deps?: ExecutorDependencies): number { setupCleanup(options.translatedAgents); const { env, additionalMcpConfig, passthroughArgs } = options; + // Use injected dependencies or defaults + const shell = deps?.shell ?? bunShell; + const baseEnv = deps?.env ?? 
Bun.env; + // Build args array const args: string[] = []; if (additionalMcpConfig) { @@ -46,16 +57,18 @@ export function executeCopilot(options: ExecutorOptions): number { } args.push(...passthroughArgs); - // Merge env with current process env + // Merge env with base env const mergedEnv = { - ...Bun.env, + ...baseEnv, ...env, }; // Spawn copilot subprocess - const result = Bun.spawnSync(['copilot', ...args], { + const result = shell.spawnSync(['copilot', ...args], { env: mergedEnv, - stdio: ['inherit', 'inherit', 'inherit'], + stdin: 'inherit', + stdout: 'inherit', + stderr: 'inherit', }); return result.exitCode ?? 1; diff --git a/src/interfaces/file-system.ts b/src/interfaces/file-system.ts new file mode 100644 index 0000000..7dd0bf2 --- /dev/null +++ b/src/interfaces/file-system.ts @@ -0,0 +1,76 @@ +/** + * File statistics interface + */ +export interface FileStat { + isDirectory(): boolean; + isFile(): boolean; +} + +/** + * Options for mkdir operation + */ +export interface MkdirOptions { + recursive?: boolean; +} + +/** + * Options for rm operation + */ +export interface RmOptions { + recursive?: boolean; + force?: boolean; +} + +/** + * Options for cp operation + */ +export interface CpOptions { + recursive?: boolean; + force?: boolean; +} + +/** + * Abstraction over file system operations. + * Enables dependency injection for testing without real file I/O. 
+ */ +export interface FileSystem { + /** + * Reads file content as UTF-8 string + */ + readFile(path: string): Promise<string>; + + /** + * Writes content to file, creating parent directories if needed + */ + writeFile(path: string, content: string): Promise<void>; + + /** + * Checks if path exists + */ + exists(path: string): Promise<boolean>; + + /** + * Creates directory, optionally with parents + */ + mkdir(path: string, options?: MkdirOptions): Promise<void>; + + /** + * Removes file or directory + */ + rm(path: string, options?: RmOptions): Promise<void>; + + /** + * Lists directory contents + */ + readdir(path: string): Promise<string[]>; + + /** + * Gets file/directory stats + */ + stat(path: string): Promise<FileStat>; + + /** + * Copies file or directory + */ + cp(src: string, dest: string, options?: CpOptions): Promise<void>; +} diff --git a/src/interfaces/process.ts b/src/interfaces/process.ts new file mode 100644 index 0000000..2191df4 --- /dev/null +++ b/src/interfaces/process.ts @@ -0,0 +1,35 @@ +/** + * Abstraction over process environment and system info. + * Enables dependency injection for testing without modifying real process state. 
+ */ +export interface ProcessEnv { + /** + * Gets an environment variable value + */ + get(key: string): string | undefined; + + /** + * Sets an environment variable + */ + set(key: string, value: string): void; + + /** + * Deletes an environment variable + */ + delete(key: string): void; + + /** + * Gets the current working directory + */ + cwd(): string; + + /** + * Gets the user's home directory + */ + homedir(): string; + + /** + * Gets the process ID + */ + pid(): number; +} diff --git a/src/interfaces/shell.ts b/src/interfaces/shell.ts new file mode 100644 index 0000000..cf99a3b --- /dev/null +++ b/src/interfaces/shell.ts @@ -0,0 +1,30 @@ +/** + * Options for spawn operations + */ +export interface SpawnOptions { + cwd?: string; + env?: Record<string, string>; + stdout?: "pipe" | "inherit" | "ignore"; + stderr?: "pipe" | "inherit" | "ignore"; + stdin?: "pipe" | "inherit" | "ignore"; +} + +/** + * Result of a synchronous spawn operation + */ +export interface SpawnSyncResult { + exitCode: number | null; + stdout: Uint8Array; + stderr: Uint8Array; +} + +/** + * Abstraction over shell/process spawning. + * Enables dependency injection for testing without running real commands. 
+ */ +export interface Shell { + /** + * Spawns a command synchronously and waits for completion + */ + spawnSync(cmd: string[], options?: SpawnOptions): SpawnSyncResult; +} diff --git a/src/marketplace.test.ts b/src/marketplace.test.ts index e3b5d04..9cd6d32 100644 --- a/src/marketplace.test.ts +++ b/src/marketplace.test.ts @@ -1,21 +1,15 @@ -import { describe, expect, test, beforeEach, afterEach } from "bun:test"; -import { - mkdirSync, - readFileSync, - rmSync, - writeFileSync, - existsSync, -} from "node:fs"; -import { dirname, join } from "node:path"; -import { tmpdir } from "node:os"; +import { describe, expect, test, beforeEach } from "bun:test"; +import { join } from "node:path"; import { addMarketplace, listMarketplaces, removeMarketplace, updateMarketplace, updateAllMarketplaces, - type MarketplacePaths, + type MarketplaceDependencies, } from "./marketplace"; +import { createMemoryFileSystem, MemoryFileSystem } from "./adapters/memory-file-system"; +import { MockShell, createMockShell } from "./adapters/mock-shell"; interface KnownMarketplaceEntry { source: { source: "github" | "directory"; repo?: string; path?: string }; @@ -23,8 +17,11 @@ interface KnownMarketplaceEntry { lastUpdated: string; } -let tempRoot: string; -let paths: MarketplacePaths; +let deps: MarketplaceDependencies; +let memFs: MemoryFileSystem; +let mockShell: MockShell; +const knownMarketplacesPath = "/test/known_marketplaces.json"; +const marketplacesRoot = "/test/marketplaces"; function mockProcessExit(): () => void { const originalExit = process.exit; @@ -53,42 +50,19 @@ function captureConsole(method: "log" | "error"): { }; } -function mockSpawnSync( - handler: (cmd: string[]) => { exitCode: number; stdout?: Uint8Array; stderr?: Uint8Array }, -): () => void { - const original = Bun.spawnSync; - (Bun as { spawnSync: typeof Bun.spawnSync }).spawnSync = ((cmd: string[]) => - handler(cmd)) as typeof Bun.spawnSync; - return () => { - (Bun as { spawnSync: typeof Bun.spawnSync 
}).spawnSync = original; - }; -} - -function getKnownMarketplacesPath(): string { - if (!paths.knownMarketplacesPath) { - throw new Error("Missing known marketplaces path"); - } - return paths.knownMarketplacesPath; -} - -function writeKnownMarketplaces(entries: Record<string, KnownMarketplaceEntry>): void { - const path = getKnownMarketplacesPath(); - mkdirSync(dirname(path), { recursive: true }); - writeFileSync(path, JSON.stringify(entries, null, 2)); +async function writeKnownMarketplaces(entries: Record<string, KnownMarketplaceEntry>): Promise<void> { + await memFs.writeFile(knownMarketplacesPath, JSON.stringify(entries, null, 2)); } -function readKnownMarketplaces(): Record<string, KnownMarketplaceEntry> { - const content = readFileSync(getKnownMarketplacesPath(), "utf-8"); +async function readKnownMarketplaces(): Promise<Record<string, KnownMarketplaceEntry>> { + const content = await memFs.readFile(knownMarketplacesPath); return JSON.parse(content); } -function createMarketplaceDir(name: string): string { - if (!paths.marketplacesRoot) { - throw new Error("Missing marketplaces root"); - } - const installLocation = join(paths.marketplacesRoot, name); - mkdirSync(join(installLocation, ".claude-plugin"), { recursive: true }); - writeFileSync( +async function createMarketplaceDir(name: string): Promise<string> { + const installLocation = join(marketplacesRoot, name); + await memFs.mkdir(join(installLocation, ".claude-plugin"), { recursive: true }); + await memFs.writeFile( join(installLocation, ".claude-plugin", "marketplace.json"), JSON.stringify({ name, plugins: [] }, null, 2), ); 
=> { - if (existsSync(tempRoot)) { - rmSync(tempRoot, { recursive: true, force: true }); - } -}); - describe("marketplace", () => { test("listMarketplaces() prints all known marketplaces", async () => { - const alphaLocation = createMarketplaceDir("alpha"); - const betaLocation = createMarketplaceDir("beta"); - writeKnownMarketplaces({ + const alphaLocation = await createMarketplaceDir("alpha"); + const betaLocation = await createMarketplaceDir("beta"); + await writeKnownMarketplaces({ alpha: { source: { source: "github", repo: "owner/alpha" }, installLocation: alphaLocation, @@ -129,7 +99,7 @@ describe("marketplace", () => { const { messages, restore } = captureConsole("log"); try { - await listMarketplaces(paths); + await listMarketplaces(deps); } finally { restore(); } @@ -142,7 +112,7 @@ describe("marketplace", () => { test("listMarketplaces() handles missing known_marketplaces.json", async () => { const { messages, restore } = captureConsole("log"); try { - await listMarketplaces(paths); + await listMarketplaces(deps); } finally { restore(); } @@ -151,28 +121,25 @@ describe("marketplace", () => { }); test("addMarketplace() parses full GitHub URL correctly", async () => { - const restoreSpawn = mockSpawnSync((cmd) => { + mockShell.setHandler((cmd) => { if (cmd[1] === "clone") { const installLocation = cmd[3]; if (!installLocation) { throw new Error("Missing install location"); } - mkdirSync(join(installLocation, ".claude-plugin"), { recursive: true }); - writeFileSync( + // Simulate git clone by creating the marketplace structure + memFs.mkdirSync(join(installLocation, ".claude-plugin"), { recursive: true }); + memFs.writeFileSync( join(installLocation, ".claude-plugin", "marketplace.json"), JSON.stringify({ name: "repo-name", plugins: [] }, null, 2), ); } - return { exitCode: 0 }; + return { exitCode: 0, stdout: new Uint8Array(), stderr: new Uint8Array() }; }); - try { - await addMarketplace("https://github.com/owner/repo-name", paths); - } finally { - 
restoreSpawn(); - } + await addMarketplace("https://github.com/owner/repo-name", deps); - const known = readKnownMarketplaces(); + const known = await readKnownMarketplaces(); const entry = known["repo-name"]; expect(entry).toBeDefined(); if (!entry) { @@ -182,28 +149,24 @@ describe("marketplace", () => { }); test("addMarketplace() parses GitHub URL with .git suffix correctly", async () => { - const restoreSpawn = mockSpawnSync((cmd) => { + mockShell.setHandler((cmd) => { if (cmd[1] === "clone") { const installLocation = cmd[3]; if (!installLocation) { throw new Error("Missing install location"); } - mkdirSync(join(installLocation, ".claude-plugin"), { recursive: true }); - writeFileSync( + memFs.mkdirSync(join(installLocation, ".claude-plugin"), { recursive: true }); + memFs.writeFileSync( join(installLocation, ".claude-plugin", "marketplace.json"), JSON.stringify({ name: "repo-name", plugins: [] }, null, 2), ); } - return { exitCode: 0 }; + return { exitCode: 0, stdout: new Uint8Array(), stderr: new Uint8Array() }; }); - try { - await addMarketplace("https://github.com/owner/repo-name.git", paths); - } finally { - restoreSpawn(); - } + await addMarketplace("https://github.com/owner/repo-name.git", deps); - const known = readKnownMarketplaces(); + const known = await readKnownMarketplaces(); const entry = known["repo-name"]; expect(entry).toBeDefined(); if (!entry) { @@ -213,28 +176,24 @@ describe("marketplace", () => { }); test("addMarketplace() parses owner/repo shorthand correctly", async () => { - const restoreSpawn = mockSpawnSync((cmd) => { + mockShell.setHandler((cmd) => { if (cmd[1] === "clone") { const installLocation = cmd[3]; if (!installLocation) { throw new Error("Missing install location"); } - mkdirSync(join(installLocation, ".claude-plugin"), { recursive: true }); - writeFileSync( + memFs.mkdirSync(join(installLocation, ".claude-plugin"), { recursive: true }); + memFs.writeFileSync( join(installLocation, ".claude-plugin", "marketplace.json"), 
JSON.stringify({ name: "repo-name", plugins: [] }, null, 2), ); } - return { exitCode: 0 }; + return { exitCode: 0, stdout: new Uint8Array(), stderr: new Uint8Array() }; }); - try { - await addMarketplace("owner/repo-name", paths); - } finally { - restoreSpawn(); - } + await addMarketplace("owner/repo-name", deps); - const known = readKnownMarketplaces(); + const known = await readKnownMarketplaces(); const entry = known["repo-name"]; expect(entry).toBeDefined(); if (!entry) { @@ -247,7 +206,7 @@ describe("marketplace", () => { const restoreExit = mockProcessExit(); const { messages, restore } = captureConsole("error"); try { - await expect(addMarketplace("invalid-input", paths)).rejects.toThrow( + await expect(addMarketplace("invalid-input", deps)).rejects.toThrow( "process.exit:1", ); expect(messages[0]).toBe("Error: Invalid marketplace: invalid-input"); @@ -257,9 +216,30 @@ describe("marketplace", () => { } }); + test("addMarketplace() throws error when cloned repo has no marketplace.json", async () => { + mockShell.setHandler((cmd) => { + if (cmd[1] === "clone") { + const installLocation = cmd[3]; + if (!installLocation) { + throw new Error("Missing install location"); + } + // Simulate git clone that creates directory but NO marketplace.json + memFs.mkdirSync(installLocation, { recursive: true }); + } + return { exitCode: 0, stdout: new Uint8Array(), stderr: new Uint8Array() }; + }); + + await expect(addMarketplace("owner/invalid-repo", deps)).rejects.toThrow( + "Invalid marketplace: owner/invalid-repo", + ); + + // Verify the invalid directory was cleaned up + expect(await memFs.exists(join(marketplacesRoot, "invalid-repo"))).toBe(false); + }); + test("addMarketplace() updates existing marketplace instead of erroring", async () => { - const installLocation = createMarketplaceDir("repo-name"); - writeKnownMarketplaces({ + const installLocation = await createMarketplaceDir("repo-name"); + await writeKnownMarketplaces({ "repo-name": { source: { source: "github", 
repo: "owner/repo-name" }, installLocation, @@ -267,31 +247,20 @@ describe("marketplace", () => { }, }); - const calls: string[][] = []; - const restoreSpawn = mockSpawnSync((cmd) => { - calls.push(cmd); - return { exitCode: 0 }; - }); - - try { - await addMarketplace("owner/repo-name", paths); - } finally { - restoreSpawn(); - } + await addMarketplace("owner/repo-name", deps); - expect(calls[0]).toEqual(["git", "-C", installLocation, "pull"]); - const updated = readKnownMarketplaces(); + expect(mockShell.calls[0]?.cmd).toEqual(["git", "-C", installLocation, "pull"]); + const updated = await readKnownMarketplaces(); const entry = updated["repo-name"]; expect(entry).toBeDefined(); if (!entry) { throw new Error("Missing marketplace entry"); } - expect(entry.lastUpdated).not.toBe("2025-01-01T00:00:00.000Z"); }); test("removeMarketplace() deletes git-cloned marketplace from disk", async () => { - const installLocation = createMarketplaceDir("to-remove"); - writeKnownMarketplaces({ + const installLocation = await createMarketplaceDir("to-remove"); + await writeKnownMarketplaces({ "to-remove": { source: { source: "github", repo: "owner/to-remove" }, installLocation, @@ -299,13 +268,13 @@ describe("marketplace", () => { }, }); - await removeMarketplace("to-remove", paths); - expect(existsSync(installLocation)).toBe(false); + await removeMarketplace("to-remove", deps); + expect(await memFs.exists(installLocation)).toBe(false); }); test("removeMarketplace() preserves directory-based marketplace on disk", async () => { - const installLocation = createMarketplaceDir("local"); - writeKnownMarketplaces({ + const installLocation = await createMarketplaceDir("local"); + await writeKnownMarketplaces({ local: { source: { source: "directory", path: installLocation }, installLocation, @@ -313,13 +282,13 @@ describe("marketplace", () => { }, }); - await removeMarketplace("local", paths); - expect(existsSync(installLocation)).toBe(true); + await removeMarketplace("local", deps); + 
expect(await memFs.exists(installLocation)).toBe(true); }); test("removeMarketplace() removes entry from known_marketplaces.json", async () => { - const installLocation = createMarketplaceDir("remove-entry"); - writeKnownMarketplaces({ + const installLocation = await createMarketplaceDir("remove-entry"); + await writeKnownMarketplaces({ "remove-entry": { source: { source: "github", repo: "owner/remove-entry" }, installLocation, @@ -327,8 +296,8 @@ describe("marketplace", () => { }, }); - await removeMarketplace("remove-entry", paths); - const known = readKnownMarketplaces(); + await removeMarketplace("remove-entry", deps); + const known = await readKnownMarketplaces(); expect(known["remove-entry"]).toBeUndefined(); }); @@ -336,7 +305,7 @@ describe("marketplace", () => { const restoreExit = mockProcessExit(); const { messages, restore } = captureConsole("error"); try { - await expect(removeMarketplace("missing", paths)).rejects.toThrow( + await expect(removeMarketplace("missing", deps)).rejects.toThrow( "process.exit:1", ); expect(messages[0]).toBe('Error: Marketplace "missing" not found'); @@ -347,8 +316,8 @@ describe("marketplace", () => { }); test("updateMarketplace() runs git pull on github marketplace", async () => { - const installLocation = createMarketplaceDir("update-me"); - writeKnownMarketplaces({ + const installLocation = await createMarketplaceDir("update-me"); + await writeKnownMarketplaces({ "update-me": { source: { source: "github", repo: "owner/update-me" }, installLocation, @@ -356,24 +325,14 @@ describe("marketplace", () => { }, }); - const calls: string[][] = []; - const restoreSpawn = mockSpawnSync((cmd) => { - calls.push(cmd); - return { exitCode: 0 }; - }); - - try { - await updateMarketplace("update-me", paths); - } finally { - restoreSpawn(); - } + await updateMarketplace("update-me", deps); - expect(calls[0]).toEqual(["git", "-C", installLocation, "pull"]); + expect(mockShell.calls[0]?.cmd).toEqual(["git", "-C", installLocation, "pull"]); 
}); test("updateMarketplace() skips directory-based marketplace", async () => { - const installLocation = createMarketplaceDir("skip-me"); - writeKnownMarketplaces({ + const installLocation = await createMarketplaceDir("skip-me"); + await writeKnownMarketplaces({ "skip-me": { source: { source: "directory", path: installLocation }, installLocation, @@ -383,7 +342,7 @@ describe("marketplace", () => { const { messages, restore } = captureConsole("log"); try { - await updateMarketplace("skip-me", paths); + await updateMarketplace("skip-me", deps); } finally { restore(); } @@ -392,8 +351,8 @@ describe("marketplace", () => { }); test("updateMarketplace() updates lastUpdated timestamp", async () => { - const installLocation = createMarketplaceDir("timestamp"); - writeKnownMarketplaces({ + const installLocation = await createMarketplaceDir("timestamp"); + await writeKnownMarketplaces({ timestamp: { source: { source: "github", repo: "owner/timestamp" }, installLocation, @@ -401,14 +360,9 @@ describe("marketplace", () => { }, }); - const restoreSpawn = mockSpawnSync(() => ({ exitCode: 0 })); - try { - await updateMarketplace("timestamp", paths); - } finally { - restoreSpawn(); - } + await updateMarketplace("timestamp", deps); - const known = readKnownMarketplaces(); + const known = await readKnownMarketplaces(); const entry = known.timestamp; expect(entry).toBeDefined(); if (!entry) { @@ -418,9 +372,9 @@ describe("marketplace", () => { }); test("updateAllMarketplaces() updates all git-based marketplaces", async () => { - const alphaLocation = createMarketplaceDir("alpha"); - const betaLocation = createMarketplaceDir("beta"); - writeKnownMarketplaces({ + const alphaLocation = await createMarketplaceDir("alpha"); + const betaLocation = await createMarketplaceDir("beta"); + await writeKnownMarketplaces({ alpha: { source: { source: "github", repo: "owner/alpha" }, installLocation: alphaLocation, @@ -433,21 +387,14 @@ describe("marketplace", () => { }, }); - const calls: 
string[][] = []; - const restoreSpawn = mockSpawnSync((cmd) => { - calls.push(cmd); - return { exitCode: 0 }; - }); - const { messages, restore } = captureConsole("log"); try { - await updateAllMarketplaces(paths); + await updateAllMarketplaces(deps); } finally { - restoreSpawn(); restore(); } - expect(calls).toEqual([["git", "-C", alphaLocation, "pull"]]); + expect(mockShell.calls.map((c) => c.cmd)).toEqual([["git", "-C", alphaLocation, "pull"]]); expect(messages).toContain("Updated 1 marketplace(s)"); }); }); diff --git a/src/marketplace.ts b/src/marketplace.ts index 26b5717..a8b9f39 100644 --- a/src/marketplace.ts +++ b/src/marketplace.ts @@ -1,7 +1,10 @@ import { homedir } from "node:os"; import { dirname, join } from "node:path"; -import { mkdirSync, rmSync } from "node:fs"; import { getKnownMarketplacesPath } from "./scanner"; +import type { FileSystem } from "./interfaces/file-system"; +import type { Shell } from "./interfaces/shell"; +import { bunFileSystem } from "./adapters/bun-file-system"; +import { bunShell } from "./adapters/bun-shell"; interface MarketplaceSource { source: "github" | "directory"; @@ -9,11 +12,16 @@ interface MarketplaceSource { path?: string; } -export interface MarketplacePaths { +export interface MarketplaceDependencies { knownMarketplacesPath?: string; marketplacesRoot?: string; + fs?: FileSystem; + shell?: Shell; } +/** @deprecated Use MarketplaceDependencies instead */ +export type MarketplacePaths = MarketplaceDependencies; + interface KnownMarketplacesFile { [marketplaceName: string]: { source: MarketplaceSource; @@ -22,9 +30,14 @@ interface KnownMarketplacesFile { }; } -function getMarketplacesRoot(paths?: MarketplacePaths): string { - if (paths?.marketplacesRoot) { - return paths.marketplacesRoot; +const defaultDeps = { + fs: bunFileSystem, + shell: bunShell, +}; + +function getMarketplacesRoot(deps?: MarketplaceDependencies): string { + if (deps?.marketplacesRoot) { + return deps.marketplacesRoot; } const homeDir = 
process.env.HOME ?? homedir(); return join(homeDir, ".claude", "plugins", "marketplaces"); @@ -38,17 +51,18 @@ function decodeOutput(output: Uint8Array | null | undefined): string { } async function readKnownMarketplaces( - paths?: MarketplacePaths, + deps?: MarketplaceDependencies, ): Promise { - const path = paths?.knownMarketplacesPath ?? getKnownMarketplacesPath(); - const file = Bun.file(path); + const { fs } = { ...defaultDeps, ...deps }; + const filePath = deps?.knownMarketplacesPath ?? getKnownMarketplacesPath(); - if (!(await file.exists())) { + if (!(await fs.exists(filePath))) { return {}; } try { - const data = await file.json(); + const content = await fs.readFile(filePath); + const data = JSON.parse(content); if (typeof data === "object" && data !== null) { return data as KnownMarketplacesFile; } @@ -64,15 +78,16 @@ async function readKnownMarketplaces( async function writeKnownMarketplaces( data: KnownMarketplacesFile, - paths?: MarketplacePaths, + deps?: MarketplaceDependencies, ): Promise { - const path = paths?.knownMarketplacesPath ?? getKnownMarketplacesPath(); + const { fs } = { ...defaultDeps, ...deps }; + const filePath = deps?.knownMarketplacesPath ?? getKnownMarketplacesPath(); try { - mkdirSync(dirname(path), { recursive: true }); - await Bun.write(path, JSON.stringify(data, null, 2)); + await fs.mkdir(dirname(filePath), { recursive: true }); + await fs.writeFile(filePath, JSON.stringify(data, null, 2)); } catch (error) { console.error( - `Error: Failed to write known marketplaces file at "${path}": ${ + `Error: Failed to write known marketplaces file at "${filePath}": ${ error instanceof Error ? 
error.message : String(error) }`, ); @@ -109,10 +124,11 @@ function parseMarketplaceTarget( return null; } -function runGitCommand(args: string[]): void { +function runGitCommand(args: string[], deps?: MarketplaceDependencies): void { + const { shell } = { ...defaultDeps, ...deps }; let result; try { - result = Bun.spawnSync(["git", ...args], { + result = shell.spawnSync(["git", ...args], { stdout: "pipe", stderr: "pipe", }); @@ -136,22 +152,22 @@ function runGitCommand(args: string[]): void { async function validateMarketplace( installLocation: string, target: string, + deps?: MarketplaceDependencies, ): Promise { + const { fs } = { ...defaultDeps, ...deps }; const marketplaceJsonPath = join( installLocation, ".claude-plugin", "marketplace.json", ); - const marketplaceJsonFile = Bun.file(marketplaceJsonPath); - if (!(await marketplaceJsonFile.exists())) { - rmSync(installLocation, { recursive: true, force: true }); - console.error(`Error: Invalid marketplace: ${target}`); - process.exit(1); + if (!(await fs.exists(marketplaceJsonPath))) { + await fs.rm(installLocation, { recursive: true, force: true }); + throw new Error(`Invalid marketplace: ${target}`); } } -export async function listMarketplaces(paths?: MarketplacePaths): Promise { - const knownMarketplaces = await readKnownMarketplaces(paths); +export async function listMarketplaces(deps?: MarketplaceDependencies): Promise { + const knownMarketplaces = await readKnownMarketplaces(deps); const entries = Object.entries(knownMarketplaces); if (entries.length === 0) { @@ -167,24 +183,25 @@ export async function listMarketplaces(paths?: MarketplacePaths): Promise export async function addMarketplace( target: string, - paths?: MarketplacePaths, + deps?: MarketplaceDependencies, ): Promise { + const { fs } = { ...defaultDeps, ...deps }; const parsed = parseMarketplaceTarget(target); if (!parsed) { console.error(`Error: Invalid marketplace: ${target}`); process.exit(1); } - const knownMarketplaces = await 
readKnownMarketplaces(paths); + const knownMarketplaces = await readKnownMarketplaces(deps); const existing = knownMarketplaces[parsed.name]; if (existing) { if (existing.source.source === "github" && existing.source.repo === parsed.repo) { - runGitCommand(["-C", existing.installLocation, "pull"]); + runGitCommand(["-C", existing.installLocation, "pull"], deps); knownMarketplaces[parsed.name] = { ...existing, lastUpdated: new Date().toISOString(), }; - await writeKnownMarketplaces(knownMarketplaces, paths); + await writeKnownMarketplaces(knownMarketplaces, deps); console.log(`Updated marketplace: ${parsed.name}`); return; } @@ -214,13 +231,13 @@ export async function addMarketplace( process.exit(1); } - const marketplacesRoot = getMarketplacesRoot(paths); - mkdirSync(marketplacesRoot, { recursive: true }); + const marketplacesRoot = getMarketplacesRoot(deps); + await fs.mkdir(marketplacesRoot, { recursive: true }); const installLocation = join(marketplacesRoot, parsed.name); const repoUrl = `https://github.com/${parsed.repo}.git`; - runGitCommand(["clone", repoUrl, installLocation]); - await validateMarketplace(installLocation, target); + runGitCommand(["clone", repoUrl, installLocation], deps); + await validateMarketplace(installLocation, target, deps); knownMarketplaces[parsed.name] = { source: { source: "github", repo: parsed.repo }, @@ -228,15 +245,16 @@ export async function addMarketplace( lastUpdated: new Date().toISOString(), }; - await writeKnownMarketplaces(knownMarketplaces, paths); + await writeKnownMarketplaces(knownMarketplaces, deps); console.log(`Added marketplace: ${parsed.name}`); } export async function removeMarketplace( name: string, - paths?: MarketplacePaths, + deps?: MarketplaceDependencies, ): Promise { - const knownMarketplaces = await readKnownMarketplaces(paths); + const { fs } = { ...defaultDeps, ...deps }; + const knownMarketplaces = await readKnownMarketplaces(deps); const marketplace = knownMarketplaces[name]; if (!marketplace) { 
console.error(`Error: Marketplace "${name}" not found`); @@ -244,19 +262,19 @@ export async function removeMarketplace( } if (marketplace.source.source === "github") { - rmSync(marketplace.installLocation, { recursive: true, force: true }); + await fs.rm(marketplace.installLocation, { recursive: true, force: true }); } delete knownMarketplaces[name]; - await writeKnownMarketplaces(knownMarketplaces, paths); + await writeKnownMarketplaces(knownMarketplaces, deps); console.log(`Removed marketplace: ${name}`); } export async function updateMarketplace( name: string, - paths?: MarketplacePaths, + deps?: MarketplaceDependencies, ): Promise { - const knownMarketplaces = await readKnownMarketplaces(paths); + const knownMarketplaces = await readKnownMarketplaces(deps); const marketplace = knownMarketplaces[name]; if (!marketplace) { console.error(`Error: Marketplace "${name}" not found`); @@ -268,25 +286,25 @@ export async function updateMarketplace( return; } - runGitCommand(["-C", marketplace.installLocation, "pull"]); + runGitCommand(["-C", marketplace.installLocation, "pull"], deps); knownMarketplaces[name] = { ...marketplace, lastUpdated: new Date().toISOString(), }; - await writeKnownMarketplaces(knownMarketplaces, paths); + await writeKnownMarketplaces(knownMarketplaces, deps); console.log(`Updated marketplace: ${name}`); } export async function updateAllMarketplaces( - paths?: MarketplacePaths, + deps?: MarketplaceDependencies, ): Promise { - const knownMarketplaces = await readKnownMarketplaces(paths); + const knownMarketplaces = await readKnownMarketplaces(deps); const githubMarketplaces = Object.entries(knownMarketplaces).filter( ([, info]) => info.source.source === "github", ); for (const [name] of githubMarketplaces) { - await updateMarketplace(name, paths); + await updateMarketplace(name, deps); } console.log(`Updated ${githubMarketplaces.length} marketplace(s)`); diff --git a/src/plugin.test.ts b/src/plugin.test.ts index 39c7aa8..d9d76aa 100644 --- 
a/src/plugin.test.ts +++ b/src/plugin.test.ts @@ -1,255 +1,177 @@ -import { describe, expect, test, beforeEach, afterEach } from "bun:test"; -import { mkdirSync, readFileSync, rmSync, writeFileSync, existsSync } from "node:fs"; -import { join } from "node:path"; -import { tmpdir } from "node:os"; -import { enablePlugin, disablePlugin, listEnabledPlugins } from "./plugin"; - -let tempRoot: string; -let originalCwd: string; -let originalHome: string | undefined; - -function mockProcessExit(): () => void { - const originalExit = process.exit; - (process as { exit: typeof process.exit }).exit = ((code?: number) => { - throw new Error(`process.exit:${code ?? 0}`); - }) as typeof process.exit; - return () => { - (process as { exit: typeof process.exit }).exit = originalExit; - }; +import { describe, expect, test } from "bun:test"; +import { + enablePlugin, + disablePlugin, + listEnabledPlugins, + type PluginDependencies, +} from "./plugin"; +import type { PluginRegistry } from "./scanner"; +import type { ConstructConfig } from "./config"; + +interface MockState { + savedConfig: ConstructConfig | null; + exitCode: number | null; + logs: string[]; + errors: string[]; } -function captureConsole(method: "log" | "error"): { - messages: string[]; - restore: () => void; -} { - const messages: string[] = []; - const original = console[method]; - console[method] = (...args: unknown[]) => { - messages.push(args.map(String).join(" ")); - }; - return { - messages, - restore: () => { - console[method] = original; +function createMockDeps( + overrides: Partial<{ + plugins: Map; + config: ConstructConfig | null; + }> = {}, +): PluginDependencies & MockState { + const result: PluginDependencies & MockState = { + savedConfig: null, + exitCode: null, + logs: [], + errors: [], + scanAllPlugins: async (): Promise => ({ + plugins: overrides.plugins ?? new Map(), + }), + loadConfig: async () => overrides.config ?? 
null, + saveConfig: async (config: ConstructConfig) => { + result.savedConfig = config; + }, + exit: ((code: number) => { + result.exitCode = code; + throw new Error(`exit:${code}`); + }) as (code: number) => never, + log: (msg: string) => { + result.logs.push(msg); + }, + error: (msg: string) => { + result.errors.push(msg); }, }; -} - -beforeEach(() => { - tempRoot = join(tmpdir(), `construct-plugin-test-${Date.now()}`); - mkdirSync(tempRoot, { recursive: true }); - - originalCwd = process.cwd(); - originalHome = process.env.HOME; - - const homeDir = join(tempRoot, "home"); - const workDir = join(tempRoot, "work"); - mkdirSync(homeDir, { recursive: true }); - mkdirSync(workDir, { recursive: true }); - - process.env.HOME = homeDir; - process.chdir(workDir); - - const marketplaceDir = join( - homeDir, - ".claude", - "plugins", - "marketplaces", - "test-marketplace", - ); - mkdirSync(join(marketplaceDir, ".claude-plugin"), { recursive: true }); - mkdirSync(join(marketplaceDir, "tmux"), { recursive: true }); - writeFileSync( - join(marketplaceDir, ".claude-plugin", "marketplace.json"), - JSON.stringify( - { - name: "test-marketplace", - plugins: [{ name: "tmux", source: "tmux", version: "1.0.0" }], - }, - null, - 2, - ), - ); - - const knownMarketplacesPath = join( - homeDir, - ".claude", - "plugins", - "known_marketplaces.json", - ); - mkdirSync(join(homeDir, ".claude", "plugins"), { recursive: true }); - writeFileSync( - knownMarketplacesPath, - JSON.stringify( - { - "test-marketplace": { - source: { source: "github", repo: "owner/test-marketplace" }, - installLocation: marketplaceDir, - lastUpdated: new Date().toISOString(), - }, - }, - null, - 2, - ), - ); -}); -afterEach(() => { - process.chdir(originalCwd); - if (originalHome === undefined) { - delete process.env.HOME; - } else { - process.env.HOME = originalHome; - } - if (existsSync(tempRoot)) { - rmSync(tempRoot, { recursive: true, force: true }); - } -}); + return result; +} describe("plugin", () => { - 
test("enablePlugin() adds plugin to .construct.json when plugin exists", async () => { - await enablePlugin("tmux@test-marketplace"); + test("enablePlugin() adds plugin to config when plugin exists", async () => { + const deps = createMockDeps({ + plugins: new Map([["tmux@test-marketplace", { name: "tmux" }]]), + config: null, + }); - const configPath = join(process.cwd(), ".construct.json"); - const config = JSON.parse(readFileSync(configPath, "utf-8")); + await enablePlugin("tmux@test-marketplace", deps); - expect(config.enabledPlugins).toEqual(["tmux@test-marketplace"]); + expect(deps.savedConfig?.enabledPlugins).toEqual(["tmux@test-marketplace"]); + expect(deps.logs).toContain("Enabled plugin: tmux@test-marketplace"); }); test("enablePlugin() exits with error when plugin not found", async () => { - const restoreExit = mockProcessExit(); - const { messages, restore } = captureConsole("error"); - try { - await expect(enablePlugin("missing@test-marketplace")).rejects.toThrow( - "process.exit:1", - ); - expect(messages[0]).toBe( - 'Error: Plugin "missing@test-marketplace" not found in any known marketplace', - ); - } finally { - restoreExit(); - restore(); - } + const deps = createMockDeps({ + plugins: new Map(), + }); + + await expect( + enablePlugin("missing@test-marketplace", deps), + ).rejects.toThrow("exit:1"); + + expect(deps.exitCode).toBe(1); + expect(deps.errors[0]).toBe( + 'Error: Plugin "missing@test-marketplace" not found in any known marketplace', + ); }); test("enablePlugin() is idempotent (no duplicate entries)", async () => { - await enablePlugin("tmux@test-marketplace"); - await enablePlugin("tmux@test-marketplace"); + const deps = createMockDeps({ + plugins: new Map([["tmux@test-marketplace", { name: "tmux" }]]), + config: { + enabledPlugins: ["tmux@test-marketplace"], + lastUsed: "2024-01-01T00:00:00.000Z", + }, + }); - const configPath = join(process.cwd(), ".construct.json"); - const config = JSON.parse(readFileSync(configPath, "utf-8")); + 
await enablePlugin("tmux@test-marketplace", deps); - expect(config.enabledPlugins).toEqual(["tmux@test-marketplace"]); + expect(deps.savedConfig).toBeNull(); + expect(deps.logs).toContain( + "Plugin already enabled: tmux@test-marketplace", + ); }); - test("disablePlugin() removes plugin from .construct.json", async () => { - const configPath = join(process.cwd(), ".construct.json"); - writeFileSync( - configPath, - JSON.stringify( - { - enabledPlugins: ["tmux@test-marketplace"], - lastUsed: new Date().toISOString(), - }, - null, - 2, - ), - ); + test("disablePlugin() removes plugin from config", async () => { + const deps = createMockDeps({ + config: { + enabledPlugins: ["tmux@test-marketplace"], + lastUsed: "2024-01-01T00:00:00.000Z", + }, + }); - await disablePlugin("tmux@test-marketplace"); + await disablePlugin("tmux@test-marketplace", deps); - const config = JSON.parse(readFileSync(configPath, "utf-8")); - expect(config.enabledPlugins).toEqual([]); + expect(deps.savedConfig?.enabledPlugins).toEqual([]); + expect(deps.logs).toContain("Disabled plugin: tmux@test-marketplace"); }); - test("disablePlugin() handles missing .construct.json gracefully", async () => { - await disablePlugin("tmux@test-marketplace"); - expect(existsSync(join(process.cwd(), ".construct.json"))).toBe(false); + test("disablePlugin() handles missing config gracefully", async () => { + const deps = createMockDeps({ + config: null, + }); + + await disablePlugin("tmux@test-marketplace", deps); + + expect(deps.savedConfig).toBeNull(); + expect(deps.logs).toContain("Plugin not enabled: tmux@test-marketplace"); }); test("disablePlugin() handles plugin not in config gracefully", async () => { - const configPath = join(process.cwd(), ".construct.json"); - writeFileSync( - configPath, - JSON.stringify( - { - enabledPlugins: ["other@test-marketplace"], - lastUsed: new Date().toISOString(), - }, - null, - 2, - ), - ); + const deps = createMockDeps({ + config: { + enabledPlugins: 
["other@test-marketplace"], + lastUsed: "2024-01-01T00:00:00.000Z", + }, + }); - await disablePlugin("tmux@test-marketplace"); + await disablePlugin("tmux@test-marketplace", deps); - const config = JSON.parse(readFileSync(configPath, "utf-8")); - expect(config.enabledPlugins).toEqual(["other@test-marketplace"]); + expect(deps.savedConfig).toBeNull(); + expect(deps.logs).toContain("Plugin not enabled: tmux@test-marketplace"); }); - test("listEnabledPlugins() prints all enabled plugins from .construct.json", async () => { - const configPath = join(process.cwd(), ".construct.json"); - writeFileSync( - configPath, - JSON.stringify( - { - enabledPlugins: [ - "tmux@test-marketplace", - "playwright@test-marketplace", - ], - lastUsed: new Date().toISOString(), - }, - null, - 2, - ), - ); + test("listEnabledPlugins() prints all enabled plugins", async () => { + const deps = createMockDeps({ + config: { + enabledPlugins: [ + "tmux@test-marketplace", + "playwright@test-marketplace", + ], + lastUsed: "2024-01-01T00:00:00.000Z", + }, + }); - const { messages, restore } = captureConsole("log"); - try { - await listEnabledPlugins(); - } finally { - restore(); - } + await listEnabledPlugins(deps); - expect(messages).toEqual([ + expect(deps.logs).toEqual([ "Enabled plugins:", " tmux@test-marketplace", " playwright@test-marketplace", ]); }); - test("listEnabledPlugins() handles missing .construct.json gracefully", async () => { - const { messages, restore } = captureConsole("log"); - try { - await listEnabledPlugins(); - } finally { - restore(); - } + test("listEnabledPlugins() handles missing config gracefully", async () => { + const deps = createMockDeps({ + config: null, + }); - expect(messages).toEqual(["No plugins enabled."]); + await listEnabledPlugins(deps); + + expect(deps.logs).toEqual(["No plugins enabled."]); }); test("listEnabledPlugins() handles empty enabledPlugins array", async () => { - const configPath = join(process.cwd(), ".construct.json"); - writeFileSync( - 
configPath, - JSON.stringify( - { - enabledPlugins: [], - lastUsed: new Date().toISOString(), - }, - null, - 2, - ), - ); + const deps = createMockDeps({ + config: { + enabledPlugins: [], + lastUsed: "2024-01-01T00:00:00.000Z", + }, + }); - const { messages, restore } = captureConsole("log"); - try { - await listEnabledPlugins(); - } finally { - restore(); - } + await listEnabledPlugins(deps); - expect(messages).toEqual(["No plugins enabled."]); + expect(deps.logs).toEqual(["No plugins enabled."]); }); }); diff --git a/src/plugin.ts b/src/plugin.ts index 8dcdfcb..424eea6 100644 --- a/src/plugin.ts +++ b/src/plugin.ts @@ -1,24 +1,58 @@ -import { scanAllPlugins } from "./scanner"; -import { loadConfig, saveConfig } from "./config"; +import { + scanAllPlugins as defaultScanAllPlugins, + type PluginRegistry, +} from "./scanner"; +import { + loadConfig as defaultLoadConfig, + saveConfig as defaultSaveConfig, + type ConstructConfig, +} from "./config"; + +/** + * Dependencies for plugin operations, allowing injection for testing. + */ +export interface PluginDependencies { + scanAllPlugins?: () => Promise<PluginRegistry>; + loadConfig?: () => Promise<ConstructConfig | null>; + saveConfig?: (config: ConstructConfig) => Promise<void>; + exit?: (code: number) => never; + log?: (msg: string) => void; + error?: (msg: string) => void; +} + +const defaultDeps: Required<PluginDependencies> = { + scanAllPlugins: defaultScanAllPlugins, + loadConfig: defaultLoadConfig, + saveConfig: defaultSaveConfig, + exit: (code: number) => process.exit(code), + log: (msg: string) => console.log(msg), + error: (msg: string) => console.error(msg), +}; /** * Enables a plugin by adding it to the project config. 
*/ -export async function enablePlugin(pluginName: string): Promise<void> { +export async function enablePlugin( + pluginName: string, + deps?: PluginDependencies, +): Promise<void> { + const { scanAllPlugins, loadConfig, saveConfig, exit, log, error } = { + ...defaultDeps, + ...deps, + }; + const registry = await scanAllPlugins(); if (!registry.plugins.has(pluginName)) { - console.error( - `Error: Plugin "${pluginName}" not found in any known marketplace`, - ); - process.exit(1); + error(`Error: Plugin "${pluginName}" not found in any known marketplace`); + exit(1); } const config = await loadConfig(); const enabledPlugins = config?.enabledPlugins ?? []; if (enabledPlugins.includes(pluginName)) { - console.log(`Plugin already enabled: ${pluginName}`); + log(`Plugin already enabled: ${pluginName}`); return; } @@ -27,17 +61,22 @@ export async function enablePlugin(pluginName: string): Promise<void> { lastUsed: new Date().toISOString(), }); - console.log(`Enabled plugin: ${pluginName}`); + log(`Enabled plugin: ${pluginName}`); } /** * Disables a plugin by removing it from the project config. */ -export async function disablePlugin(pluginName: string): Promise<void> { +export async function disablePlugin( + pluginName: string, + deps?: PluginDependencies, +): Promise<void> { + const { loadConfig, saveConfig, log } = { ...defaultDeps, ...deps }; + const config = await loadConfig(); if (!config || !config.enabledPlugins.includes(pluginName)) { - console.log(`Plugin not enabled: ${pluginName}`); + log(`Plugin not enabled: ${pluginName}`); return; } @@ -46,23 +85,27 @@ export async function disablePlugin(pluginName: string): Promise<void> { lastUsed: new Date().toISOString(), }); - console.log(`Disabled plugin: ${pluginName}`); + log(`Disabled plugin: ${pluginName}`); } /** * Lists enabled plugins from the project config. 
*/ -export async function listEnabledPlugins(): Promise<void> { +export async function listEnabledPlugins( + deps?: PluginDependencies, +): Promise<void> { + const { loadConfig, log } = { ...defaultDeps, ...deps }; + const config = await loadConfig(); const enabledPlugins = config?.enabledPlugins ?? []; if (enabledPlugins.length === 0) { - console.log("No plugins enabled."); + log("No plugins enabled."); return; } - console.log("Enabled plugins:"); + log("Enabled plugins:"); for (const pluginName of enabledPlugins) { - console.log(` ${pluginName}`); + log(` ${pluginName}`); } } diff --git a/src/scanner.test.ts b/src/scanner.test.ts new file mode 100644 index 0000000..d74e6a0 --- /dev/null +++ b/src/scanner.test.ts @@ -0,0 +1,230 @@ +import { describe, expect, test } from "bun:test"; +import { createMemoryFileSystem } from "./adapters/memory-file-system"; +import { MockProcess } from "./adapters/mock-process"; +import { + getKnownMarketplacesPath, + scanAllPlugins, + listAvailablePlugins, +} from "./scanner"; +import type { ScannerDependencies } from "./scanner"; + +function createDeps( + files: Record<string, string> = {}, + homedir = "/home/user" +): ScannerDependencies { + const builder = createMemoryFileSystem(); + for (const [path, content] of Object.entries(files)) { + builder.withFile(path, content); + } + return { + fs: builder.build(), + process: new MockProcess({ homedir }), + }; +} + +describe("scanner", () => { + describe("getKnownMarketplacesPath", () => { + test("returns correct path using homedir from deps", () => { + const deps = createDeps({}, "/Users/test"); + const path = getKnownMarketplacesPath(deps); + expect(path).toBe("/Users/test/.claude/plugins/known_marketplaces.json"); + }); + + test("returns correct path using HOME env var when set", () => { + const proc = new MockProcess({ env: { HOME: "/custom/home" } }); + const deps: ScannerDependencies = { + process: proc, + }; + const path = getKnownMarketplacesPath(deps); + 
expect(path).toBe("/custom/home/.claude/plugins/known_marketplaces.json"); + }); + + test("falls back to homedir when HOME not set", () => { + const proc = new MockProcess({ homedir: "/fallback/home" }); + const deps: ScannerDependencies = { + process: proc, + }; + const path = getKnownMarketplacesPath(deps); + expect(path).toBe( + "/fallback/home/.claude/plugins/known_marketplaces.json" + ); + }); + }); + + describe("scanAllPlugins", () => { + test("returns empty registry when no marketplaces file exists", async () => { + const deps = createDeps({}); + const registry = await scanAllPlugins(deps); + expect(registry.plugins.size).toBe(0); + }); + + test("returns empty registry when marketplaces file is empty object", async () => { + const deps = createDeps({ + "/home/user/.claude/plugins/known_marketplaces.json": "{}", + }); + const registry = await scanAllPlugins(deps); + expect(registry.plugins.size).toBe(0); + }); + + test("finds plugins from marketplace", async () => { + const marketplaceDir = "/home/user/.claude/plugins/marketplaces/test-mp"; + const deps = createDeps({ + "/home/user/.claude/plugins/known_marketplaces.json": JSON.stringify({ + "test-mp": { + source: { source: "github", repo: "owner/test-mp" }, + installLocation: marketplaceDir, + lastUpdated: "2024-01-01T00:00:00Z", + }, + }), + [`${marketplaceDir}/.claude-plugin/marketplace.json`]: JSON.stringify({ + name: "test-mp", + plugins: [ + { + name: "my-plugin", + source: "plugins/my-plugin", + version: "1.0.0", + description: "A test plugin", + }, + ], + }), + }); + + const registry = await scanAllPlugins(deps); + + expect(registry.plugins.size).toBe(1); + expect(registry.plugins.has("my-plugin@test-mp")).toBe(true); + + const plugin = registry.plugins.get("my-plugin@test-mp"); + expect(plugin?.name).toBe("my-plugin@test-mp"); + expect(plugin?.version).toBe("1.0.0"); + expect(plugin?.description).toBe("A test plugin"); + expect(plugin?.installPath).toBe(`${marketplaceDir}/plugins/my-plugin`); + }); + 
+ test("finds multiple plugins from multiple marketplaces", async () => { + const mp1Dir = "/home/user/.claude/plugins/marketplaces/marketplace-one"; + const mp2Dir = "/home/user/.claude/plugins/marketplaces/marketplace-two"; + const deps = createDeps({ + "/home/user/.claude/plugins/known_marketplaces.json": JSON.stringify({ + "marketplace-one": { + source: { source: "github" }, + installLocation: mp1Dir, + lastUpdated: "2024-01-01T00:00:00Z", + }, + "marketplace-two": { + source: { source: "github" }, + installLocation: mp2Dir, + lastUpdated: "2024-01-01T00:00:00Z", + }, + }), + [`${mp1Dir}/.claude-plugin/marketplace.json`]: JSON.stringify({ + name: "marketplace-one", + plugins: [{ name: "plugin-a", source: "plugin-a", version: "1.0.0" }], + }), + [`${mp2Dir}/.claude-plugin/marketplace.json`]: JSON.stringify({ + name: "marketplace-two", + plugins: [{ name: "plugin-b", source: "plugin-b", version: "2.0.0" }], + }), + }); + + const registry = await scanAllPlugins(deps); + + expect(registry.plugins.size).toBe(2); + expect(registry.plugins.has("plugin-a@marketplace-one")).toBe(true); + expect(registry.plugins.has("plugin-b@marketplace-two")).toBe(true); + }); + + test("skips plugins with non-string source", async () => { + const marketplaceDir = "/home/user/.claude/plugins/marketplaces/test-mp"; + const deps = createDeps({ + "/home/user/.claude/plugins/known_marketplaces.json": JSON.stringify({ + "test-mp": { + source: { source: "github" }, + installLocation: marketplaceDir, + lastUpdated: "2024-01-01T00:00:00Z", + }, + }), + [`${marketplaceDir}/.claude-plugin/marketplace.json`]: JSON.stringify({ + name: "test-mp", + plugins: [ + { name: "local-plugin", source: "local-plugin", version: "1.0.0" }, + { + name: "remote-plugin", + source: { type: "git", url: "https://..." 
}, + version: "1.0.0", + }, + ], + }), + }); + + const registry = await scanAllPlugins(deps); + + expect(registry.plugins.size).toBe(1); + expect(registry.plugins.has("local-plugin@test-mp")).toBe(true); + expect(registry.plugins.has("remote-plugin@test-mp")).toBe(false); + }); + + test("sanitizes multiline descriptions", async () => { + const marketplaceDir = "/home/user/.claude/plugins/marketplaces/test-mp"; + const deps = createDeps({ + "/home/user/.claude/plugins/known_marketplaces.json": JSON.stringify({ + "test-mp": { + source: { source: "github" }, + installLocation: marketplaceDir, + lastUpdated: "2024-01-01T00:00:00Z", + }, + }), + [`${marketplaceDir}/.claude-plugin/marketplace.json`]: JSON.stringify({ + name: "test-mp", + plugins: [ + { + name: "my-plugin", + source: "my-plugin", + version: "1.0.0", + description: "Line one\nLine two\tTabbed", + }, + ], + }), + }); + + const registry = await scanAllPlugins(deps); + const plugin = registry.plugins.get("my-plugin@test-mp"); + + expect(plugin?.description).toBe("Line one Line two Tabbed"); + }); + }); + + describe("listAvailablePlugins", () => { + test("returns empty array when no plugins exist", async () => { + const deps = createDeps({}); + const plugins = await listAvailablePlugins(deps); + expect(plugins).toEqual([]); + }); + + test("returns plugin names from registry", async () => { + const marketplaceDir = "/home/user/.claude/plugins/marketplaces/test-mp"; + const deps = createDeps({ + "/home/user/.claude/plugins/known_marketplaces.json": JSON.stringify({ + "test-mp": { + source: { source: "github" }, + installLocation: marketplaceDir, + lastUpdated: "2024-01-01T00:00:00Z", + }, + }), + [`${marketplaceDir}/.claude-plugin/marketplace.json`]: JSON.stringify({ + name: "test-mp", + plugins: [ + { name: "alpha", source: "alpha" }, + { name: "beta", source: "beta" }, + ], + }), + }); + + const plugins = await listAvailablePlugins(deps); + + expect(plugins).toContain("alpha@test-mp"); + 
expect(plugins).toContain("beta@test-mp"); + expect(plugins.length).toBe(2); + }); + }); +}); diff --git a/src/scanner.ts b/src/scanner.ts index 4520626..50f5e48 100644 --- a/src/scanner.ts +++ b/src/scanner.ts @@ -1,7 +1,17 @@ import { join } from "node:path"; -import { homedir } from "node:os"; -import { stat } from "node:fs/promises"; import { Glob } from "bun"; +import type { FileSystem } from "./interfaces/file-system"; +import type { ProcessEnv } from "./interfaces/process"; +import { bunFileSystem } from "./adapters/bun-file-system"; +import { nodeProcess } from "./adapters/node-process"; + +/** + * Dependencies for scanner functions, enabling dependency injection for testing + */ +export interface ScannerDependencies { + fs?: FileSystem; + process?: ProcessEnv; +} /** * Represents a single component within a plugin (skill, MCP server, or agent) @@ -70,15 +80,17 @@ function sanitizeDescription(description?: string): string | undefined { /** * Gets the path to the known marketplaces configuration file */ -export function getKnownMarketplacesPath(): string { - const homeDir = process.env.HOME ?? homedir(); +export function getKnownMarketplacesPath(deps: ScannerDependencies = {}): string { + const proc = deps.process ?? nodeProcess; + const homeDir = proc.get('HOME') ?? proc.homedir(); return join(homeDir, '.claude', 'plugins', 'known_marketplaces.json'); } /** * Scans a plugin directory for components (skills, MCPs, agents) */ -async function scanPluginComponents(installPath: string): Promise { +async function scanPluginComponents(installPath: string, deps: ScannerDependencies = {}): Promise { + const fs = deps.fs ?? bunFileSystem; const components: PluginComponent[] = []; try { @@ -99,8 +111,7 @@ async function scanPluginComponents(installPath: string): Promise { +export async function scanMarketplacePlugins(deps: ScannerDependencies = {}): Promise { + const fs = deps.fs ?? 
bunFileSystem; const registry: PluginRegistry = { plugins: new Map() }; - const knownMarketplacesPath = getKnownMarketplacesPath(); - const knownMarketplacesFile = Bun.file(knownMarketplacesPath); + const knownMarketplacesPath = getKnownMarketplacesPath(deps); // Handle missing known_marketplaces.json gracefully - if (!await knownMarketplacesFile.exists()) { + if (!await fs.exists(knownMarketplacesPath)) { return registry; } try { - const knownMarketplacesData: KnownMarketplacesFile = await knownMarketplacesFile.json(); + const knownMarketplacesData: KnownMarketplacesFile = JSON.parse(await fs.readFile(knownMarketplacesPath)); // Process each known marketplace for (const [marketplaceName, marketplaceInfo] of Object.entries(knownMarketplacesData)) { @@ -154,20 +165,19 @@ export async function scanMarketplacePlugins(): Promise { try { // Check if the marketplace directory exists - const marketplaceStat = await stat(installLocation); + const marketplaceStat = await fs.stat(installLocation); if (!marketplaceStat.isDirectory()) { continue; } // Read the marketplace.json file const marketplaceJsonPath = join(installLocation, '.claude-plugin', 'marketplace.json'); - const marketplaceJsonFile = Bun.file(marketplaceJsonPath); - if (!await marketplaceJsonFile.exists()) { + if (!await fs.exists(marketplaceJsonPath)) { continue; } - const marketplaceData: MarketplaceFile = await marketplaceJsonFile.json(); + const marketplaceData: MarketplaceFile = JSON.parse(await fs.readFile(marketplaceJsonPath)); // Process each plugin in the marketplace for (const plugin of marketplaceData.plugins) { @@ -181,7 +191,7 @@ export async function scanMarketplacePlugins(): Promise { const pluginPath = join(installLocation, plugin.source); // Scan for components in the plugin directory - const components = await scanPluginComponents(pluginPath); + const components = await scanPluginComponents(pluginPath, deps); const description = sanitizeDescription(plugin.description); // Use the format 
plugin-name@marketplace-name @@ -212,14 +222,14 @@ export async function scanMarketplacePlugins(): Promise<PluginRegistry> { /** * Scans all available plugins from known marketplaces */ -export async function scanAllPlugins(): Promise<PluginRegistry> { - return scanMarketplacePlugins(); +export async function scanAllPlugins(deps: ScannerDependencies = {}): Promise<PluginRegistry> { + return scanMarketplacePlugins(deps); } /** * Returns a list of all available plugin names (installed and marketplace) */ -export async function listAvailablePlugins(): Promise<string[]> { - const registry = await scanAllPlugins(); +export async function listAvailablePlugins(deps: ScannerDependencies = {}): Promise<string[]> { + const registry = await scanAllPlugins(deps); return Array.from(registry.plugins.keys()); } diff --git a/src/test-utils/index.ts b/src/test-utils/index.ts new file mode 100644 index 0000000..ea3d6a7 --- /dev/null +++ b/src/test-utils/index.ts @@ -0,0 +1,10 @@ +// Re-export memory file system +export { MemoryFileSystem, createMemoryFileSystem } from '../adapters/memory-file-system'; + +// Re-export mock process +export { MockProcess, createMockProcess } from '../adapters/mock-process'; +export type { MockProcessOptions } from '../adapters/mock-process'; + +// Re-export mock shell +export { MockShell, createMockShell } from '../adapters/mock-shell'; +export type { ShellHandler } from '../adapters/mock-shell'; diff --git a/src/translator.ts b/src/translator.ts index eb3a988..34a43a9 100644 --- a/src/translator.ts +++ b/src/translator.ts @@ -1,7 +1,18 @@ import { join } from "node:path"; import type { PluginInfo, PluginComponent } from "./scanner"; import { translateAgents, type TranslatedAgent } from "./agent-translator"; -import { initCache, getCachedPlugin } from "./cache"; +import { createCache } from "./cache"; +import type { CacheInstance } from "./cache"; +import type { FileSystem } from "./interfaces/file-system"; +import { bunFileSystem } from "./adapters/bun-file-system"; + +/** + * Dependencies for translator operations + 
*/ +export interface TranslatorDependencies { + cache?: CacheInstance; + fs?: FileSystem; +} /** * Claude Code MCP server configuration format (local) @@ -121,11 +132,11 @@ function transformMcpServer( * Reads and parses a .mcp.json file */ async function readMcpConfig( - mcpConfigPath: string + mcpConfigPath: string, + fs: FileSystem ): Promise { try { - const file = Bun.file(mcpConfigPath); - const text = await file.text(); + const text = await fs.readFile(mcpConfigPath); return JSON.parse(text) as ClaudeMcpConfig; } catch (error) { console.error(`Failed to read MCP config at ${mcpConfigPath}:`, error); @@ -161,16 +172,19 @@ function getMcpConfigPath(plugin: PluginInfo): string | undefined { * for GitHub Copilot CLI. * * @param plugins - Array of enabled plugins with their component information + * @param deps - Optional dependencies for cache and file system * @returns Translation result with environment variables and MCP config JSON */ export async function translatePlugins( - plugins: PluginInfo[] + plugins: PluginInfo[], + deps?: TranslatorDependencies ): Promise { const env: Record = {}; const allMcpServers: Record = {}; - // Initialize cache - initCache(); + // Use provided dependencies or defaults + const cache = deps?.cache ?? createCache(); + const fs = deps?.fs ?? bunFileSystem; // Map plugin to cached path for reuse const pluginCachePaths = new Map(); @@ -178,7 +192,7 @@ export async function translatePlugins( // 1. 
Build COPILOT_SKILLS_DIRS from all skill paths (using cached paths) const allSkillPaths: string[] = []; for (const plugin of plugins) { - const cachedPath = await getCachedPlugin(plugin); + const cachedPath = await cache.getCachedPlugin(plugin); pluginCachePaths.set(plugin, cachedPath); allSkillPaths.push(...getSkillPaths(plugin, cachedPath)); } @@ -195,7 +209,7 @@ export async function translatePlugins( const cachedPath = pluginCachePaths.get(plugin); if (cachedPath) { const cachedMcpConfigPath = join(cachedPath, ".mcp.json"); - const claudeConfig = await readMcpConfig(cachedMcpConfigPath); + const claudeConfig = await readMcpConfig(cachedMcpConfigPath, fs); if (claudeConfig) { // Transform each server in the config // No inline expansion needed - cache files already have vars expanded