From a720a9e63b41122d187f8b142b0bb41731885887 Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 13:57:26 +0200 Subject: [PATCH 01/18] feat(types): add ScopeMetadata interface, Rule.metadata, and ProjectConfig.global Add ScopeMetadata interface with optional globs, paths, and trigger fields. Add optional metadata field to Rule interface for scope targeting. Add optional global field to ProjectConfig (defaults to true semantically). --- packages/cli/src/bridges/types.ts | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/cli/src/bridges/types.ts b/packages/cli/src/bridges/types.ts index 0f878b1..5aef8e7 100644 --- a/packages/cli/src/bridges/types.ts +++ b/packages/cli/src/bridges/types.ts @@ -1,3 +1,9 @@ +export interface ScopeMetadata { + globs?: string[]; + paths?: string[]; + trigger?: 'always' | 'glob' | 'manual'; +} + export interface Rule { id: string; scope: string; @@ -7,6 +13,7 @@ export interface Rule { enabled: boolean; sourceBlock?: string; source?: string; + metadata?: ScopeMetadata; } export interface PulledEntry { @@ -26,6 +33,7 @@ export interface ProjectConfig { blocks: string[]; pulled: PulledEntry[]; assets: AssetEntry[]; + global?: boolean; } export const ASSET_TYPE = { From 9bd9e7af32f05b526f44753c3761d57e37d9b4ec Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:08:59 +0200 Subject: [PATCH 02/18] feat(types): refactor Bridge into DirectoryBridge | MarkerBridge discriminated union Replace monolithic Bridge interface with discriminated union: - DirectoryBridge (kind: 'directory'): claude, cursor, windsurf - MarkerBridge (kind: 'marker'): gemini, copilot Add isDirectoryBridge/isMarkerBridge type guards and getBridgeOutputPaths helper. Update all bridge implementations and consumer code to use the new union. Update tests for new bridge shapes. 
--- packages/cli/src/bridges/claude.ts | 10 +++--- packages/cli/src/bridges/copilot.ts | 5 +-- packages/cli/src/bridges/cursor.ts | 10 +++--- packages/cli/src/bridges/gemini.ts | 5 +-- packages/cli/src/bridges/types.ts | 35 +++++++++++++++++++-- packages/cli/src/bridges/windsurf.ts | 10 +++--- packages/cli/src/commands/compile.ts | 20 ++++++++---- packages/cli/src/commands/doctor.ts | 3 +- packages/cli/src/commands/explain.ts | 11 +++++-- packages/cli/src/commands/list.ts | 16 +++++++--- packages/cli/tests/bridges/copilot.test.ts | 4 +++ packages/cli/tests/bridges/windsurf.test.ts | 10 +++--- packages/cli/tests/commands/doctor.test.ts | 18 ++++++----- packages/cli/tests/commands/explain.test.ts | 4 +-- packages/cli/tests/e2e/cli.test.ts | 18 +++++------ packages/cli/tests/ui/output.test.ts | 6 ++-- 16 files changed, 126 insertions(+), 59 deletions(-) diff --git a/packages/cli/src/bridges/claude.ts b/packages/cli/src/bridges/claude.ts index 42befe8..5c33795 100644 --- a/packages/cli/src/bridges/claude.ts +++ b/packages/cli/src/bridges/claude.ts @@ -1,4 +1,4 @@ -import type { Bridge, Rule, ProjectConfig } from './types.js'; +import type { DirectoryBridge, Rule, ProjectConfig } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; function buildMarkdown(rules: Rule[]): string { @@ -31,10 +31,12 @@ function buildMarkdown(rules: Rule[]): string { return lines.join('\n'); } -export const claudeBridge: Bridge = { +export const claudeBridge: DirectoryBridge = { id: 'claude', - outputPaths: ['CLAUDE.md'], - usesMarkers: true, + kind: 'directory', + outputDir: '.claude/rules', + filePrefix: 'dwf-', + fileExtension: '.md', compile(rules: Rule[], _config: ProjectConfig): Map { const output = new Map(); diff --git a/packages/cli/src/bridges/copilot.ts b/packages/cli/src/bridges/copilot.ts index 5861809..1b886d6 100644 --- a/packages/cli/src/bridges/copilot.ts +++ b/packages/cli/src/bridges/copilot.ts @@ -1,4 +1,4 @@ -import type 
{ Bridge, Rule, ProjectConfig } from './types.js'; +import type { MarkerBridge, Rule, ProjectConfig } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; function buildMarkdown(rules: Rule[]): string { @@ -31,8 +31,9 @@ function buildMarkdown(rules: Rule[]): string { return lines.join('\n'); } -export const copilotBridge: Bridge = { +export const copilotBridge: MarkerBridge = { id: 'copilot', + kind: 'marker', outputPaths: ['.github/copilot-instructions.md'], usesMarkers: true, diff --git a/packages/cli/src/bridges/cursor.ts b/packages/cli/src/bridges/cursor.ts index 8de9656..2f86f86 100644 --- a/packages/cli/src/bridges/cursor.ts +++ b/packages/cli/src/bridges/cursor.ts @@ -1,4 +1,4 @@ -import type { Bridge, Rule, ProjectConfig } from './types.js'; +import type { DirectoryBridge, Rule, ProjectConfig } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; const FRONTMATTER = `--- @@ -39,10 +39,12 @@ function buildMdc(rules: Rule[]): string { return lines.join('\n'); } -export const cursorBridge: Bridge = { +export const cursorBridge: DirectoryBridge = { id: 'cursor', - outputPaths: ['.cursor/rules/devworkflows.mdc'], - usesMarkers: false, + kind: 'directory', + outputDir: '.cursor/rules', + filePrefix: 'dwf-', + fileExtension: '.mdc', compile(rules: Rule[], _config: ProjectConfig): Map { const output = new Map(); diff --git a/packages/cli/src/bridges/gemini.ts b/packages/cli/src/bridges/gemini.ts index 34dfdc1..3eb4008 100644 --- a/packages/cli/src/bridges/gemini.ts +++ b/packages/cli/src/bridges/gemini.ts @@ -1,4 +1,4 @@ -import type { Bridge, Rule, ProjectConfig } from './types.js'; +import type { MarkerBridge, Rule, ProjectConfig } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; function buildMarkdown(rules: Rule[]): string { @@ -31,8 +31,9 @@ function buildMarkdown(rules: Rule[]): string { return 
lines.join('\n'); } -export const geminiBridge: Bridge = { +export const geminiBridge: MarkerBridge = { id: 'gemini', + kind: 'marker', outputPaths: ['GEMINI.md'], usesMarkers: true, diff --git a/packages/cli/src/bridges/types.ts b/packages/cli/src/bridges/types.ts index 5aef8e7..9bd96a3 100644 --- a/packages/cli/src/bridges/types.ts +++ b/packages/cli/src/bridges/types.ts @@ -51,9 +51,38 @@ export interface AssetEntry { installed_at: string; } -export interface Bridge { +interface BaseBridge { id: string; - outputPaths: string[]; - usesMarkers: boolean; compile(rules: Rule[], config: ProjectConfig): Map; } + +export interface DirectoryBridge extends BaseBridge { + kind: 'directory'; + outputDir: string; + filePrefix: string; + fileExtension: string; +} + +export interface MarkerBridge extends BaseBridge { + kind: 'marker'; + outputPaths: string[]; + usesMarkers: true; +} + +export type Bridge = DirectoryBridge | MarkerBridge; + +export function isDirectoryBridge(bridge: Bridge): bridge is DirectoryBridge { + return bridge.kind === 'directory'; +} + +export function isMarkerBridge(bridge: Bridge): bridge is MarkerBridge { + return bridge.kind === 'marker'; +} + +/** Get the known output paths for a bridge (for MarkerBridge returns outputPaths, for DirectoryBridge returns empty since paths are dynamic). 
*/ +export function getBridgeOutputPaths(bridge: Bridge): string[] { + if (isMarkerBridge(bridge)) { + return bridge.outputPaths; + } + return []; +} diff --git a/packages/cli/src/bridges/windsurf.ts b/packages/cli/src/bridges/windsurf.ts index 49b7d5c..9ac41c7 100644 --- a/packages/cli/src/bridges/windsurf.ts +++ b/packages/cli/src/bridges/windsurf.ts @@ -1,4 +1,4 @@ -import type { Bridge, Rule, ProjectConfig } from './types.js'; +import type { DirectoryBridge, Rule, ProjectConfig } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; const WINDSURF_CHAR_LIMIT = 6000; @@ -43,10 +43,12 @@ function buildMarkdown(rules: Rule[]): string { return content; } -export const windsurfBridge: Bridge = { +export const windsurfBridge: DirectoryBridge = { id: 'windsurf', - outputPaths: ['.windsurf/rules/devworkflows.md'], - usesMarkers: false, + kind: 'directory', + outputDir: '.windsurf/rules', + filePrefix: 'dwf-', + fileExtension: '.md', compile(rules: Rule[], _config: ProjectConfig): Map { const output = new Map(); diff --git a/packages/cli/src/commands/compile.ts b/packages/cli/src/commands/compile.ts index 8b9343b..e87bf5b 100644 --- a/packages/cli/src/commands/compile.ts +++ b/packages/cli/src/commands/compile.ts @@ -6,6 +6,7 @@ import { readConfig, readRules } from '../core/parser.js'; import { computeRulesHash, writeHash } from '../core/hash.js'; import { deployAssets } from '../core/assets.js'; import type { Bridge } from '../bridges/types.js'; +import { isMarkerBridge, getBridgeOutputPaths } from '../bridges/types.js'; import { claudeBridge } from '../bridges/claude.js'; import { cursorBridge } from '../bridges/cursor.js'; import { geminiBridge } from '../bridges/gemini.js'; @@ -75,11 +76,13 @@ export async function executePipeline(options: PipelineOptions): Promise 0) { + for (const relativePath of errorPaths) { + results.push({ bridgeId: bridge.id, outputPath: relativePath, success: false, error: message }); + } + } 
else { + results.push({ bridgeId: bridge.id, outputPath: bridge.id, success: false, error: message }); } } } diff --git a/packages/cli/src/commands/doctor.ts b/packages/cli/src/commands/doctor.ts index b2c0092..4778cfa 100644 --- a/packages/cli/src/commands/doctor.ts +++ b/packages/cli/src/commands/doctor.ts @@ -10,6 +10,7 @@ import { geminiBridge } from '../bridges/gemini.js'; import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; import type { Bridge, ProjectConfig, PulledEntry, AssetEntry, Rule } from '../bridges/types.js'; +import { getBridgeOutputPaths } from '../bridges/types.js'; import { fileExists } from '../utils/fs.js'; import { isValidScope } from '../core/schema.js'; import * as ui from '../utils/ui.js'; @@ -158,7 +159,7 @@ export async function checkSymlinks(cwd: string, config: ProjectConfig): Promise for (const bridge of BRIDGES) { if (!config.tools.includes(bridge.id)) continue; - for (const outputPath of bridge.outputPaths) { + for (const outputPath of getBridgeOutputPaths(bridge)) { const absolutePath = join(cwd, outputPath); try { const stat = await lstat(absolutePath); diff --git a/packages/cli/src/commands/explain.ts b/packages/cli/src/commands/explain.ts index 4569475..9809b0b 100644 --- a/packages/cli/src/commands/explain.ts +++ b/packages/cli/src/commands/explain.ts @@ -3,6 +3,7 @@ import type { Command } from 'commander'; import chalk from 'chalk'; import { readConfig, readRules } from '../core/parser.js'; import type { Bridge, Rule } from '../bridges/types.js'; +import { isMarkerBridge, isDirectoryBridge, getBridgeOutputPaths } from '../bridges/types.js'; import { claudeBridge } from '../bridges/claude.js'; import { cursorBridge } from '../bridges/cursor.js'; import { geminiBridge } from '../bridges/gemini.js'; @@ -26,7 +27,7 @@ function getBridge(id: string): Bridge | undefined { } function getModeLabel(bridge: Bridge): string { - if (bridge.usesMarkers) { + if 
(isMarkerBridge(bridge)) { return 'markers (BEGIN/END)'; } return 'full file'; @@ -81,7 +82,8 @@ async function runExplain(options: ExplainOptions): Promise { const bridge = getBridge(toolId); if (!bridge) continue; - const outputPath = bridge.outputPaths[0] ?? toolId; + const bridgePaths = getBridgeOutputPaths(bridge); + const outputPath = bridgePaths[0] ?? (isDirectoryBridge(bridge) ? `${bridge.outputDir}/${bridge.filePrefix}*${bridge.fileExtension}` : toolId); console.log(` ${formatSeparator(toolId)}`); ui.newline(); @@ -108,7 +110,10 @@ async function runExplain(options: ExplainOptions): Promise { if (bridge.id === 'windsurf') { const outputs = bridge.compile(rules, config); - const content = outputs.get('.windsurf/rules/devworkflows.md') ?? ''; + let content = ''; + for (const [, val] of outputs) { + content += val; + } const charCount = content.length; const formatted = `${String(charCount).replace(/\B(?=(\d{3})+(?!\d))/g, ',')} / ${String(WINDSURF_CHAR_LIMIT).replace(/\B(?=(\d{3})+(?!\d))/g, ',')} chars`; ui.newline(); diff --git a/packages/cli/src/commands/list.ts b/packages/cli/src/commands/list.ts index c424635..4c7198e 100644 --- a/packages/cli/src/commands/list.ts +++ b/packages/cli/src/commands/list.ts @@ -9,7 +9,7 @@ import { geminiBridge } from '../bridges/gemini.js'; import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; import type { Bridge } from '../bridges/types.js'; -import { ASSET_TYPE } from '../bridges/types.js'; +import { ASSET_TYPE, isDirectoryBridge, getBridgeOutputPaths } from '../bridges/types.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; @@ -80,9 +80,17 @@ async function listTools(): Promise { ui.newline(); for (const tool of config.tools) { const bridge = BRIDGES.find((b) => b.id === tool); - const outputPath = bridge?.outputPaths[0]; - if (outputPath) { - console.log(` ${chalk.dim(ICONS.bullet)} 
${chalk.cyan(tool.padEnd(12))}${chalk.dim(ICONS.arrow)} ${chalk.dim(outputPath)}`); + let outputLabel: string | undefined; + if (bridge) { + if (isDirectoryBridge(bridge)) { + outputLabel = `${bridge.outputDir}/${bridge.filePrefix}*${bridge.fileExtension}`; + } else { + const paths = getBridgeOutputPaths(bridge); + outputLabel = paths[0]; + } + } + if (outputLabel) { + console.log(` ${chalk.dim(ICONS.bullet)} ${chalk.cyan(tool.padEnd(12))}${chalk.dim(ICONS.arrow)} ${chalk.dim(outputLabel)}`); } else { console.log(` ${chalk.dim(ICONS.bullet)} ${chalk.cyan(tool)}`); } diff --git a/packages/cli/tests/bridges/copilot.test.ts b/packages/cli/tests/bridges/copilot.test.ts index 79e8085..674f6a9 100644 --- a/packages/cli/tests/bridges/copilot.test.ts +++ b/packages/cli/tests/bridges/copilot.test.ts @@ -29,6 +29,10 @@ describe('copilotBridge', () => { assert.equal(copilotBridge.id, 'copilot'); }); + it('has kind marker', () => { + assert.equal(copilotBridge.kind, 'marker'); + }); + it('has correct output path', () => { assert.deepEqual(copilotBridge.outputPaths, ['.github/copilot-instructions.md']); }); diff --git a/packages/cli/tests/bridges/windsurf.test.ts b/packages/cli/tests/bridges/windsurf.test.ts index 2c7d239..1db236f 100644 --- a/packages/cli/tests/bridges/windsurf.test.ts +++ b/packages/cli/tests/bridges/windsurf.test.ts @@ -29,12 +29,14 @@ describe('windsurfBridge', () => { assert.equal(windsurfBridge.id, 'windsurf'); }); - it('has correct output path', () => { - assert.deepEqual(windsurfBridge.outputPaths, ['.windsurf/rules/devworkflows.md']); + it('has kind directory', () => { + assert.equal(windsurfBridge.kind, 'directory'); }); - it('does not use markers', () => { - assert.equal(windsurfBridge.usesMarkers, false); + it('has correct output directory config', () => { + assert.equal(windsurfBridge.outputDir, '.windsurf/rules'); + assert.equal(windsurfBridge.filePrefix, 'dwf-'); + assert.equal(windsurfBridge.fileExtension, '.md'); }); it('generates correct 
markdown output', () => { diff --git a/packages/cli/tests/commands/doctor.test.ts b/packages/cli/tests/commands/doctor.test.ts index 5fd373d..be10ad3 100644 --- a/packages/cli/tests/commands/doctor.test.ts +++ b/packages/cli/tests/commands/doctor.test.ts @@ -273,7 +273,7 @@ blocks: [] const config: ProjectConfig = { version: '0.1', project: { name: 'test' }, - tools: ['claude'], + tools: ['copilot'], mode: 'link', blocks: [], pulled: [], @@ -281,10 +281,11 @@ blocks: [] }; // Create a target file and a symlink pointing to it - const targetPath = join(tmpDir, '.dwf', '.cache', 'CLAUDE.md'); - await mkdir(join(tmpDir, '.dwf', '.cache'), { recursive: true }); + await mkdir(join(tmpDir, '.dwf', '.cache', '.github'), { recursive: true }); + await mkdir(join(tmpDir, '.github'), { recursive: true }); + const targetPath = join(tmpDir, '.dwf', '.cache', '.github', 'copilot-instructions.md'); await writeFile(targetPath, 'content'); - await symlink(targetPath, join(tmpDir, 'CLAUDE.md')); + await symlink(targetPath, join(tmpDir, '.github', 'copilot-instructions.md')); const result = await checkSymlinks(tmpDir, config); assert.equal(result.passed, true); @@ -295,7 +296,7 @@ blocks: [] const config: ProjectConfig = { version: '0.1', project: { name: 'test' }, - tools: ['claude'], + tools: ['copilot'], mode: 'link', blocks: [], pulled: [], @@ -303,12 +304,13 @@ blocks: [] }; // Create a symlink pointing to a non-existent target - const brokenTarget = join(tmpDir, '.dwf', '.cache', 'CLAUDE.md'); - await symlink(brokenTarget, join(tmpDir, 'CLAUDE.md')); + await mkdir(join(tmpDir, '.github'), { recursive: true }); + const brokenTarget = join(tmpDir, '.dwf', '.cache', '.github', 'copilot-instructions.md'); + await symlink(brokenTarget, join(tmpDir, '.github', 'copilot-instructions.md')); const result = await checkSymlinks(tmpDir, config); assert.equal(result.passed, false); - assert.ok(result.message.includes('CLAUDE.md')); + 
assert.ok(result.message.includes('copilot-instructions.md')); }); }); diff --git a/packages/cli/tests/commands/explain.test.ts b/packages/cli/tests/commands/explain.test.ts index 8c66bc8..7d6d013 100644 --- a/packages/cli/tests/commands/explain.test.ts +++ b/packages/cli/tests/commands/explain.test.ts @@ -75,7 +75,7 @@ describe('devw explain', () => { assert.equal(result.exitCode, 0); assert.ok(result.stdout.includes('claude')); - assert.ok(result.stdout.includes('CLAUDE.md')); + assert.ok(result.stdout.includes('.claude/rules/dwf-')); assert.ok(result.stdout.includes('Rules:')); assert.ok(result.stdout.includes('architecture:')); }); @@ -102,7 +102,7 @@ describe('devw explain', () => { assert.equal(result.exitCode, 0); assert.ok(result.stdout.includes('windsurf')); - assert.ok(result.stdout.includes('.windsurf/rules/devworkflows.md')); + assert.ok(result.stdout.includes('.windsurf/rules/dwf-')); assert.ok(!result.stdout.includes('claude')); }); diff --git a/packages/cli/tests/e2e/cli.test.ts b/packages/cli/tests/e2e/cli.test.ts index 155796e..1663341 100644 --- a/packages/cli/tests/e2e/cli.test.ts +++ b/packages/cli/tests/e2e/cli.test.ts @@ -78,7 +78,7 @@ describe('devw CLI e2e', () => { assert.ok(result.stderr.includes('already exists')); }); - it('compile generates CLAUDE.md with markers when rules exist', async () => { + it('compile generates CLAUDE.md when rules exist', async () => { await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); // Write a manual rule so compile has something to output @@ -100,12 +100,12 @@ rules: assert.ok(result.stdout.includes('Compiled')); const claudeMd = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); - assert.ok(claudeMd.includes('')); - assert.ok(claudeMd.includes('')); + assert.ok(claudeMd.includes('# Project Rules')); + assert.ok(claudeMd.includes('Always test your code.')); }); - it('compile preserves user content outside markers', async () => { - await run(['init', '--tools', 'claude', '--mode', 
'copy', '-y'], tmpDir); + it('compile with copilot preserves user content outside markers', async () => { + await run(['init', '--tools', 'copilot', '--mode', 'copy', '-y'], tmpDir); const rulesPath = join(tmpDir, '.dwf', 'rules', 'conventions.yml'); await writeFile( @@ -121,14 +121,14 @@ rules: await run(['compile'], tmpDir); - const claudeMd = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); - const withUserContent = `# My Custom Rules\n\nDo not touch this.\n\n${claudeMd}\n# Footer\n\nAlso keep this.\n`; - await writeFile(join(tmpDir, 'CLAUDE.md'), withUserContent, 'utf-8'); + const copilotMd = await readFile(join(tmpDir, '.github', 'copilot-instructions.md'), 'utf-8'); + const withUserContent = `# My Custom Rules\n\nDo not touch this.\n\n${copilotMd}\n# Footer\n\nAlso keep this.\n`; + await writeFile(join(tmpDir, '.github', 'copilot-instructions.md'), withUserContent, 'utf-8'); const result = await run(['compile'], tmpDir); assert.equal(result.exitCode, 0); - const updated = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); + const updated = await readFile(join(tmpDir, '.github', 'copilot-instructions.md'), 'utf-8'); assert.ok(updated.includes('# My Custom Rules')); assert.ok(updated.includes('Do not touch this.')); assert.ok(updated.includes('# Footer')); diff --git a/packages/cli/tests/ui/output.test.ts b/packages/cli/tests/ui/output.test.ts index 6b88b9c..287f28c 100644 --- a/packages/cli/tests/ui/output.test.ts +++ b/packages/cli/tests/ui/output.test.ts @@ -191,7 +191,7 @@ describe('output format: list tools', () => { assert.equal(result.exitCode, 0); assert.ok(result.stdout.includes('\u203A'), 'should have bullet prefix'); assert.ok(result.stdout.includes('\u2192'), 'should have arrow'); - assert.ok(result.stdout.includes('CLAUDE.md'), 'should show output path'); + assert.ok(result.stdout.includes('.claude/rules/dwf-'), 'should show output directory pattern'); }); }); @@ -233,12 +233,12 @@ describe('output format: explain', () => { it('shows new mode 
labels', async () => { await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); - await writeFile(join(tmpDir, '.dwf', 'config.yml'), CONFIG_TEMPLATE(['claude', 'cursor'])); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), CONFIG_TEMPLATE(['copilot', 'cursor'])); await writeFile(join(tmpDir, '.dwf', 'rules', 'architecture.yml'), RULES_WITH_MIX); const result = await run(['explain'], tmpDir); - assert.ok(result.stdout.includes('markers (BEGIN/END)'), 'should show markers mode for claude'); + assert.ok(result.stdout.includes('markers (BEGIN/END)'), 'should show markers mode for copilot'); assert.ok(result.stdout.includes('full file'), 'should show full file mode for cursor'); }); }); From ae4e43dffcfe50bd905e258718c8e47af56261bb Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:10:54 +0200 Subject: [PATCH 03/18] feat(core): add scopeToFilename and cleanStaleFiles utilities scopeToFilename converts scope names to filenames by replacing colons with hyphens (e.g., team:payments -> dwf-team-payments.md). cleanStaleFiles removes orphaned dwf-* files from a directory that are not in the current expected set, for cleanup after compilation. Includes comprehensive tests for both functions covering built-in scopes, custom scopes with colons, stale file removal, and edge cases. 
--- packages/cli/src/core/scope-filename.ts | 53 +++++++ .../cli/tests/core/scope-filename.test.ts | 149 ++++++++++++++++++ 2 files changed, 202 insertions(+) create mode 100644 packages/cli/src/core/scope-filename.ts create mode 100644 packages/cli/tests/core/scope-filename.test.ts diff --git a/packages/cli/src/core/scope-filename.ts b/packages/cli/src/core/scope-filename.ts new file mode 100644 index 0000000..68ae1aa --- /dev/null +++ b/packages/cli/src/core/scope-filename.ts @@ -0,0 +1,53 @@ +import { readdir, unlink } from 'node:fs/promises'; +import { join } from 'node:path'; + +const COLON_SEPARATOR = '-'; + +/** + * Convert a scope name to a filename using the given prefix and extension. + * + * Colons in the scope are replaced with hyphens. + * Example: scopeToFilename('team:payments', 'dwf-', '.md') => 'dwf-team-payments.md' + */ +export function scopeToFilename(scope: string, prefix: string, extension: string): string { + const sanitized = scope.replaceAll(':', COLON_SEPARATOR); + return `${prefix}${sanitized}${extension}`; +} + +/** + * Glob for files matching {prefix}*{extension} in a directory, + * delete any that are NOT in the currentFiles set. + * Returns the list of deleted file paths (relative to dir). + * + * Only touches files that start with the given prefix. + * Ignores files that don't match the prefix pattern. + * If the directory does not exist, returns an empty array without error. 
+ */ +export async function cleanStaleFiles( + dir: string, + prefix: string, + extension: string, + currentFiles: Set, +): Promise { + let entries: string[]; + try { + entries = await readdir(dir); + } catch { + return []; + } + + const deleted: string[] = []; + + for (const entry of entries) { + if (!entry.startsWith(prefix) || !entry.endsWith(extension)) { + continue; + } + + if (!currentFiles.has(entry)) { + await unlink(join(dir, entry)); + deleted.push(entry); + } + } + + return deleted; +} diff --git a/packages/cli/tests/core/scope-filename.test.ts b/packages/cli/tests/core/scope-filename.test.ts new file mode 100644 index 0000000..e60a1aa --- /dev/null +++ b/packages/cli/tests/core/scope-filename.test.ts @@ -0,0 +1,149 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, writeFile, readdir } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { scopeToFilename, cleanStaleFiles } from '../../src/core/scope-filename.js'; + +describe('scopeToFilename', () => { + it('converts built-in scope "conventions"', () => { + const result = scopeToFilename('conventions', 'dwf-', '.md'); + assert.equal(result, 'dwf-conventions.md'); + }); + + it('converts built-in scope "security"', () => { + const result = scopeToFilename('security', 'dwf-', '.md'); + assert.equal(result, 'dwf-security.md'); + }); + + it('converts built-in scope "testing"', () => { + const result = scopeToFilename('testing', 'dwf-', '.mdc'); + assert.equal(result, 'dwf-testing.mdc'); + }); + + it('converts built-in scope "architecture"', () => { + const result = scopeToFilename('architecture', 'dwf-', '.md'); + assert.equal(result, 'dwf-architecture.md'); + }); + + it('converts built-in scope "workflow"', () => { + const result = scopeToFilename('workflow', 'dwf-', '.md'); + assert.equal(result, 'dwf-workflow.md'); + }); + + it('replaces single colon in custom scope', () => 
{ + const result = scopeToFilename('team:payments', 'dwf-', '.md'); + assert.equal(result, 'dwf-team-payments.md'); + }); + + it('replaces multiple colons in custom scope', () => { + const result = scopeToFilename('team:payments:billing', 'dwf-', '.md'); + assert.equal(result, 'dwf-team-payments-billing.md'); + }); + + it('works with .mdc extension for cursor', () => { + const result = scopeToFilename('team:payments', 'dwf-', '.mdc'); + assert.equal(result, 'dwf-team-payments.mdc'); + }); + + it('works with .instructions.md extension', () => { + const result = scopeToFilename('security', 'dwf-', '.instructions.md'); + assert.equal(result, 'dwf-security.instructions.md'); + }); + + it('handles scope with no colon', () => { + const result = scopeToFilename('conventions', 'dwf-', '.md'); + assert.equal(result, 'dwf-conventions.md'); + }); +}); + +describe('cleanStaleFiles', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-stale-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('removes orphaned files with matching prefix and extension', async () => { + await writeFile(join(tmpDir, 'dwf-conventions.md'), 'content'); + await writeFile(join(tmpDir, 'dwf-security.md'), 'content'); + await writeFile(join(tmpDir, 'dwf-testing.md'), 'content'); + + const currentFiles = new Set(['dwf-conventions.md', 'dwf-security.md']); + const deleted = await cleanStaleFiles(tmpDir, 'dwf-', '.md', currentFiles); + + assert.deepEqual(deleted, ['dwf-testing.md']); + + const remaining = await readdir(tmpDir); + assert.ok(remaining.includes('dwf-conventions.md')); + assert.ok(remaining.includes('dwf-security.md')); + assert.ok(!remaining.includes('dwf-testing.md')); + }); + + it('preserves current files', async () => { + await writeFile(join(tmpDir, 'dwf-conventions.md'), 'content'); + await writeFile(join(tmpDir, 'dwf-security.md'), 'content'); + + const currentFiles = new 
Set(['dwf-conventions.md', 'dwf-security.md']); + const deleted = await cleanStaleFiles(tmpDir, 'dwf-', '.md', currentFiles); + + assert.deepEqual(deleted, []); + + const remaining = await readdir(tmpDir); + assert.equal(remaining.length, 2); + }); + + it('ignores non-matching files (no prefix match)', async () => { + await writeFile(join(tmpDir, 'dwf-conventions.md'), 'content'); + await writeFile(join(tmpDir, 'my-custom-rule.md'), 'user content'); + await writeFile(join(tmpDir, 'README.md'), 'readme'); + + const currentFiles = new Set(['dwf-conventions.md']); + const deleted = await cleanStaleFiles(tmpDir, 'dwf-', '.md', currentFiles); + + assert.deepEqual(deleted, []); + + const remaining = await readdir(tmpDir); + assert.ok(remaining.includes('my-custom-rule.md')); + assert.ok(remaining.includes('README.md')); + assert.ok(remaining.includes('dwf-conventions.md')); + }); + + it('ignores files with wrong extension', async () => { + await writeFile(join(tmpDir, 'dwf-conventions.md'), 'content'); + await writeFile(join(tmpDir, 'dwf-conventions.mdc'), 'cursor content'); + + const currentFiles = new Set(['dwf-conventions.md']); + const deleted = await cleanStaleFiles(tmpDir, 'dwf-', '.md', currentFiles); + + assert.deepEqual(deleted, []); + + const remaining = await readdir(tmpDir); + assert.ok(remaining.includes('dwf-conventions.mdc')); + }); + + it('returns empty array for non-existent directory', async () => { + const nonExistent = join(tmpDir, 'does-not-exist'); + const deleted = await cleanStaleFiles(nonExistent, 'dwf-', '.md', new Set()); + + assert.deepEqual(deleted, []); + }); + + it('removes all stale files when currentFiles is empty', async () => { + await writeFile(join(tmpDir, 'dwf-conventions.md'), 'content'); + await writeFile(join(tmpDir, 'dwf-security.md'), 'content'); + + const deleted = await cleanStaleFiles(tmpDir, 'dwf-', '.md', new Set()); + + assert.equal(deleted.length, 2); + assert.ok(deleted.includes('dwf-conventions.md')); + 
assert.ok(deleted.includes('dwf-security.md')); + + const remaining = await readdir(tmpDir); + assert.equal(remaining.length, 0); + }); +}); From 740a882bbb7f2201ea9b798fcb2e188ff9078717 Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:22:28 +0200 Subject: [PATCH 04/18] feat(core): add legacy file detection and migration utilities MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Create cleanup.ts with detectLegacyFiles(), migrateLegacyFiles(), and removeLegacyMarkerBlock() for v0.5/v0.6 migration. Detects legacy .cursor/rules/devworkflows.mdc, .windsurf/rules/devworkflows.md, and CLAUDE.md with markers. Idempotent — skips missing files silently. Includes 17 tests covering detection, migration, marker removal, idempotency, and edge cases. --- packages/cli/src/core/cleanup.ts | 112 +++++++++ packages/cli/tests/core/cleanup.test.ts | 299 ++++++++++++++++++++++++ 2 files changed, 411 insertions(+) create mode 100644 packages/cli/src/core/cleanup.ts create mode 100644 packages/cli/tests/core/cleanup.test.ts diff --git a/packages/cli/src/core/cleanup.ts b/packages/cli/src/core/cleanup.ts new file mode 100644 index 0000000..3a7f56e --- /dev/null +++ b/packages/cli/src/core/cleanup.ts @@ -0,0 +1,112 @@ +import { readFile, unlink, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { fileExists } from '../utils/fs.js'; +import { removeMarkedBlock } from './markers.js'; + +export interface LegacyFile { + path: string; + type: 'marker' | 'full-file'; + bridgeId: string; +} + +/** + * Detect legacy single-file output from v0.5/v0.6 that needs migration. + * + * Only .cursor and .windsurf had full-file replacement, so only those are legacy. + * CLAUDE.md markers need to be removed since Claude is now a DirectoryBridge. + * GEMINI.md and .github/copilot-instructions.md are NOT legacy (they remain MarkerBridge). 
+ */ +export async function detectLegacyFiles(cwd: string): Promise { + const legacyFiles: LegacyFile[] = []; + + // Check for legacy .cursor/rules/devworkflows.mdc (full-file) + const cursorLegacy = join(cwd, '.cursor', 'rules', 'devworkflows.mdc'); + if (await fileExists(cursorLegacy)) { + legacyFiles.push({ + path: cursorLegacy, + type: 'full-file', + bridgeId: 'cursor', + }); + } + + // Check for legacy .windsurf/rules/devworkflows.md (full-file) + const windsurfLegacy = join(cwd, '.windsurf', 'rules', 'devworkflows.md'); + if (await fileExists(windsurfLegacy)) { + legacyFiles.push({ + path: windsurfLegacy, + type: 'full-file', + bridgeId: 'windsurf', + }); + } + + // Check for CLAUDE.md with dev-workflows markers (marker type) + const claudeLegacy = join(cwd, 'CLAUDE.md'); + if (await fileExists(claudeLegacy)) { + const content = await readFile(claudeLegacy, 'utf-8'); + if (content.includes('') && content.includes('')) { + legacyFiles.push({ + path: claudeLegacy, + type: 'marker', + bridgeId: 'claude', + }); + } + } + + return legacyFiles; +} + +/** + * Remove legacy files. For full-file types, delete the file. + * For marker types, remove the marker block preserving user content. + * Returns list of actions taken (for user messaging). + * + * This is idempotent — if files don't exist, skip silently. + */ +export async function migrateLegacyFiles(_cwd: string, legacyFiles: LegacyFile[]): Promise { + const actions: string[] = []; + + for (const legacy of legacyFiles) { + if (!(await fileExists(legacy.path))) { + continue; + } + + if (legacy.type === 'full-file') { + await unlink(legacy.path); + actions.push(`Removed legacy ${legacy.path}`); + } else if (legacy.type === 'marker') { + const removed = await removeLegacyMarkerBlock(legacy.path); + if (removed) { + actions.push(`Removed devw block from ${legacy.path}`); + } + } + } + + return actions; +} + +/** + * Remove the old marker block (BEGIN/END dev-workflows) from a file. 
+ * If the file becomes empty after removal, delete it. + * Returns true if changes were made. + */ +export async function removeLegacyMarkerBlock(filePath: string): Promise { + if (!(await fileExists(filePath))) { + return false; + } + + const content = await readFile(filePath, 'utf-8'); + + if (!content.includes('') || !content.includes('')) { + return false; + } + + const cleaned = removeMarkedBlock(content); + + if (cleaned.trim() === '') { + await unlink(filePath); + } else { + await writeFile(filePath, cleaned, 'utf-8'); + } + + return true; +} diff --git a/packages/cli/tests/core/cleanup.test.ts b/packages/cli/tests/core/cleanup.test.ts new file mode 100644 index 0000000..e85a999 --- /dev/null +++ b/packages/cli/tests/core/cleanup.test.ts @@ -0,0 +1,299 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, mkdir, writeFile, readFile, readdir } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { + detectLegacyFiles, + migrateLegacyFiles, + removeLegacyMarkerBlock, +} from '../../src/core/cleanup.js'; +import type { LegacyFile } from '../../src/core/cleanup.js'; + +describe('detectLegacyFiles', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-cleanup-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('detects legacy .cursor/rules/devworkflows.mdc', async () => { + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'old cursor content'); + + const legacy = await detectLegacyFiles(tmpDir); + + assert.equal(legacy.length, 1); + assert.equal(legacy[0]?.type, 'full-file'); + assert.equal(legacy[0]?.bridgeId, 'cursor'); + }); + + it('detects legacy .windsurf/rules/devworkflows.md', async () => { + await mkdir(join(tmpDir, '.windsurf', 
'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.windsurf', 'rules', 'devworkflows.md'), 'old windsurf content'); + + const legacy = await detectLegacyFiles(tmpDir); + + assert.equal(legacy.length, 1); + assert.equal(legacy[0]?.type, 'full-file'); + assert.equal(legacy[0]?.bridgeId, 'windsurf'); + }); + + it('detects CLAUDE.md with dev-workflows markers', async () => { + const claudeContent = [ + '# My Notes', + '', + '', + '# Project Rules', + '', + '', + '# Other stuff', + ].join('\n'); + await writeFile(join(tmpDir, 'CLAUDE.md'), claudeContent); + + const legacy = await detectLegacyFiles(tmpDir); + + assert.equal(legacy.length, 1); + assert.equal(legacy[0]?.type, 'marker'); + assert.equal(legacy[0]?.bridgeId, 'claude'); + }); + + it('does NOT detect CLAUDE.md without markers', async () => { + await writeFile(join(tmpDir, 'CLAUDE.md'), '# Just a normal CLAUDE.md'); + + const legacy = await detectLegacyFiles(tmpDir); + + assert.equal(legacy.length, 0); + }); + + it('does NOT detect GEMINI.md as legacy', async () => { + const geminiContent = [ + '', + '# Rules', + '', + ].join('\n'); + await writeFile(join(tmpDir, 'GEMINI.md'), geminiContent); + + const legacy = await detectLegacyFiles(tmpDir); + + assert.equal(legacy.length, 0); + }); + + it('does NOT detect .github/copilot-instructions.md as legacy', async () => { + await mkdir(join(tmpDir, '.github'), { recursive: true }); + const copilotContent = [ + '', + '# Rules', + '', + ].join('\n'); + await writeFile(join(tmpDir, '.github', 'copilot-instructions.md'), copilotContent); + + const legacy = await detectLegacyFiles(tmpDir); + + assert.equal(legacy.length, 0); + }); + + it('detects multiple legacy files at once', async () => { + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await mkdir(join(tmpDir, '.windsurf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'cursor'); + await writeFile(join(tmpDir, '.windsurf', 
'rules', 'devworkflows.md'), 'windsurf'); + await writeFile( + join(tmpDir, 'CLAUDE.md'), + '\n# Rules\n', + ); + + const legacy = await detectLegacyFiles(tmpDir); + + assert.equal(legacy.length, 3); + const bridgeIds = legacy.map((l) => l.bridgeId).sort(); + assert.deepEqual(bridgeIds, ['claude', 'cursor', 'windsurf']); + }); + + it('returns empty array when no legacy files exist', async () => { + const legacy = await detectLegacyFiles(tmpDir); + + assert.equal(legacy.length, 0); + }); +}); + +describe('migrateLegacyFiles', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-migrate-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('deletes full-file legacy files', async () => { + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'old content'); + + const legacyFiles: LegacyFile[] = [{ + path: join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), + type: 'full-file', + bridgeId: 'cursor', + }]; + + const actions = await migrateLegacyFiles(tmpDir, legacyFiles); + + assert.equal(actions.length, 1); + assert.ok(actions[0]?.includes('Removed legacy')); + + const remaining = await readdir(join(tmpDir, '.cursor', 'rules')); + assert.equal(remaining.length, 0); + }); + + it('removes marker block from CLAUDE.md preserving manual content', async () => { + const claudeContent = [ + '# My Custom Notes', + '', + '', + '# Project Rules', + '', + '## Architecture', + '', + '- Use named exports.', + '', + '', + '# Other important stuff', + ].join('\n'); + await writeFile(join(tmpDir, 'CLAUDE.md'), claudeContent); + + const legacyFiles: LegacyFile[] = [{ + path: join(tmpDir, 'CLAUDE.md'), + type: 'marker', + bridgeId: 'claude', + }]; + + const actions = await migrateLegacyFiles(tmpDir, legacyFiles); + + assert.equal(actions.length, 1); + assert.ok(actions[0]?.includes('Removed devw 
block')); + + const content = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); + assert.ok(content.includes('# My Custom Notes')); + assert.ok(content.includes('# Other important stuff')); + assert.ok(!content.includes('BEGIN dev-workflows')); + assert.ok(!content.includes('END dev-workflows')); + assert.ok(!content.includes('Use named exports')); + }); + + it('deletes CLAUDE.md if it becomes empty after marker removal', async () => { + const claudeContent = [ + '', + '# Project Rules', + '', + ].join('\n'); + await writeFile(join(tmpDir, 'CLAUDE.md'), claudeContent); + + const legacyFiles: LegacyFile[] = [{ + path: join(tmpDir, 'CLAUDE.md'), + type: 'marker', + bridgeId: 'claude', + }]; + + const actions = await migrateLegacyFiles(tmpDir, legacyFiles); + + assert.equal(actions.length, 1); + + const entries = await readdir(tmpDir); + assert.ok(!entries.includes('CLAUDE.md')); + }); + + it('is idempotent — skips files that do not exist', async () => { + const legacyFiles: LegacyFile[] = [{ + path: join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), + type: 'full-file', + bridgeId: 'cursor', + }]; + + const actions = await migrateLegacyFiles(tmpDir, legacyFiles); + + assert.equal(actions.length, 0); + }); + + it('handles mixed legacy file types', async () => { + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'old cursor'); + await writeFile( + join(tmpDir, 'CLAUDE.md'), + '# Notes\n\n\nRules\n\n\n# More', + ); + + const legacyFiles: LegacyFile[] = [ + { + path: join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), + type: 'full-file', + bridgeId: 'cursor', + }, + { + path: join(tmpDir, 'CLAUDE.md'), + type: 'marker', + bridgeId: 'claude', + }, + ]; + + const actions = await migrateLegacyFiles(tmpDir, legacyFiles); + + assert.equal(actions.length, 2); + }); +}); + +describe('removeLegacyMarkerBlock', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = 
await mkdtemp(join(tmpdir(), 'devw-marker-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('removes marker block and preserves surrounding content', async () => { + const filePath = join(tmpDir, 'test.md'); + await writeFile(filePath, '# Before\n\n\nRules\n\n\n# After'); + + const result = await removeLegacyMarkerBlock(filePath); + + assert.equal(result, true); + const content = await readFile(filePath, 'utf-8'); + assert.ok(content.includes('# Before')); + assert.ok(content.includes('# After')); + assert.ok(!content.includes('BEGIN dev-workflows')); + }); + + it('returns false if file does not exist', async () => { + const result = await removeLegacyMarkerBlock(join(tmpDir, 'nonexistent.md')); + + assert.equal(result, false); + }); + + it('returns false if file has no markers', async () => { + const filePath = join(tmpDir, 'test.md'); + await writeFile(filePath, '# Just normal content'); + + const result = await removeLegacyMarkerBlock(filePath); + + assert.equal(result, false); + }); + + it('deletes file if it becomes empty after removal', async () => { + const filePath = join(tmpDir, 'test.md'); + await writeFile(filePath, '\nRules\n'); + + const result = await removeLegacyMarkerBlock(filePath); + + assert.equal(result, true); + const entries = await readdir(tmpDir); + assert.ok(!entries.includes('test.md')); + }); +}); From b1e9eec500e5808221d747b52558e0495cd4d33f Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:22:45 +0200 Subject: [PATCH 05/18] feat(bridges): refactor Claude, Cursor, and Windsurf to multi-file output Each DirectoryBridge now generates one file per scope instead of a single monolithic file. Keys follow the pattern {outputDir}/dwf-{scope}.{ext}. 
- Claude: paths frontmatter when metadata.paths set, no frontmatter for global scope - Cursor: description + globs/alwaysApply frontmatter per scope - Windsurf: trigger frontmatter (default: always), globs when trigger=glob. Char limit warning now applies per-file. Includes comprehensive tests for each bridge covering multi-file output, frontmatter variants, custom scopes with colons, and metadata handling. --- packages/cli/src/bridges/claude.ts | 74 +++++--- packages/cli/src/bridges/cursor.ts | 85 +++++---- packages/cli/src/bridges/windsurf.ts | 76 +++++--- packages/cli/tests/bridges/claude.test.ts | 199 ++++++++++++++++++++ packages/cli/tests/bridges/cursor.test.ts | 188 ++++++++++++++++++ packages/cli/tests/bridges/windsurf.test.ts | 139 +++++++++++--- 6 files changed, 655 insertions(+), 106 deletions(-) create mode 100644 packages/cli/tests/bridges/claude.test.ts create mode 100644 packages/cli/tests/bridges/cursor.test.ts diff --git a/packages/cli/src/bridges/claude.ts b/packages/cli/src/bridges/claude.ts index 5c33795..cf858ca 100644 --- a/packages/cli/src/bridges/claude.ts +++ b/packages/cli/src/bridges/claude.ts @@ -1,28 +1,51 @@ -import type { DirectoryBridge, Rule, ProjectConfig } from './types.js'; +import type { DirectoryBridge, Rule, ProjectConfig, ScopeMetadata } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; +import { scopeToFilename } from '../core/scope-filename.js'; + +const GENERATED_COMMENT = ''; + +function buildFrontmatter(metadata?: ScopeMetadata): string { + if (!metadata?.paths || metadata.paths.length === 0) { + return ''; + } -function buildMarkdown(rules: Rule[]): string { const lines: string[] = [ - '# Project Rules', + '---', + 'paths:', ]; + for (const p of metadata.paths) { + lines.push(` - "${p}"`); + } + lines.push('---'); - const filtered = filterRules(rules); - const grouped = groupByScope(filtered); - - for (const [scope, scopeRules] of grouped) { - lines.push('', `## 
${formatScopeHeading(scope)}`);
-    lines.push('');
-    for (const rule of scopeRules) {
-      const contentLines = rule.content.split('\n');
-      const first = contentLines[0];
-      if (first !== undefined) {
-        lines.push(`- ${first}`);
-      }
-      for (let i = 1; i < contentLines.length; i++) {
-        const line = contentLines[i];
-        if (line !== undefined) {
-          lines.push(`  ${line}`);
-        }
+  return lines.join('\n');
+}
+
+function buildScopeMarkdown(scope: string, rules: Rule[]): string {
+  const lines: string[] = [];
+
+  // Get metadata from the first rule in the scope (all rules in a scope share metadata)
+  const metadata = rules[0]?.metadata;
+  const frontmatter = buildFrontmatter(metadata);
+
+  if (frontmatter) {
+    lines.push(frontmatter);
+  }
+
+  lines.push(GENERATED_COMMENT);
+  lines.push(`# ${formatScopeHeading(scope)}`);
+  lines.push('');
+
+  for (const rule of rules) {
+    const contentLines = rule.content.split('\n');
+    const first = contentLines[0];
+    if (first !== undefined) {
+      lines.push(`- ${first}`);
+    }
+    for (let i = 1; i < contentLines.length; i++) {
+      const line = contentLines[i];
+      if (line !== undefined) {
+        lines.push(`  ${line}`);
       }
     }
   }
@@ -40,7 +63,16 @@ export const claudeBridge: DirectoryBridge = {
 
   compile(rules: Rule[], _config: ProjectConfig): Map<string, string> {
     const output = new Map<string, string>();
-    output.set('CLAUDE.md', buildMarkdown(rules));
+
+    const filtered = filterRules(rules);
+    const grouped = groupByScope(filtered);
+
+    for (const [scope, scopeRules] of grouped) {
+      const filename = scopeToFilename(scope, 'dwf-', '.md');
+      const key = `.claude/rules/${filename}`;
+      output.set(key, buildScopeMarkdown(scope, scopeRules));
+    }
+
     return output;
   },
 };
diff --git a/packages/cli/src/bridges/cursor.ts b/packages/cli/src/bridges/cursor.ts
index 2f86f86..ecf557d 100644
--- a/packages/cli/src/bridges/cursor.ts
+++ b/packages/cli/src/bridges/cursor.ts
@@ -1,36 +1,48 @@
-import type { DirectoryBridge, Rule, ProjectConfig } from './types.js';
+import type { DirectoryBridge, Rule,
ProjectConfig, ScopeMetadata } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; +import { scopeToFilename } from '../core/scope-filename.js'; -const FRONTMATTER = `--- -description: Project rules generated by dev-workflows -globs: -alwaysApply: true ----`; - -function buildMdc(rules: Rule[]): string { - const lines: string[] = [ - FRONTMATTER, - '', - '', - ]; - - const filtered = filterRules(rules); - const grouped = groupByScope(filtered); - - for (const [scope, scopeRules] of grouped) { - lines.push('', `## ${formatScopeHeading(scope)}`); - lines.push(''); - for (const rule of scopeRules) { - const contentLines = rule.content.split('\n'); - const first = contentLines[0]; - if (first !== undefined) { - lines.push(`- ${first}`); - } - for (let i = 1; i < contentLines.length; i++) { - const line = contentLines[i]; - if (line !== undefined) { - lines.push(` ${line}`); - } +const GENERATED_COMMENT = ''; + +function buildFrontmatter(scope: string, metadata?: ScopeMetadata): string { + const hasGlobs = metadata?.globs && metadata.globs.length > 0; + const description = `${formatScopeHeading(scope)} rules generated by dev-workflows`; + + const lines: string[] = ['---']; + lines.push(`description: ${description}`); + + if (hasGlobs && metadata?.globs) { + const globsStr = metadata.globs.map((g) => `"${g}"`).join(', '); + lines.push(`globs: [${globsStr}]`); + lines.push('alwaysApply: false'); + } else { + lines.push('globs:'); + lines.push('alwaysApply: true'); + } + + lines.push('---'); + return lines.join('\n'); +} + +function buildScopeMdc(scope: string, rules: Rule[]): string { + const lines: string[] = []; + + const metadata = rules[0]?.metadata; + lines.push(buildFrontmatter(scope, metadata)); + lines.push(GENERATED_COMMENT); + lines.push(`# ${formatScopeHeading(scope)}`); + lines.push(''); + + for (const rule of rules) { + const contentLines = rule.content.split('\n'); + const first = contentLines[0]; + if 
(first !== undefined) {
+      lines.push(`- ${first}`);
+    }
+    for (let i = 1; i < contentLines.length; i++) {
+      const line = contentLines[i];
+      if (line !== undefined) {
+        lines.push(`  ${line}`);
       }
     }
   }
@@ -48,7 +60,16 @@ export const cursorBridge: DirectoryBridge = {
 
   compile(rules: Rule[], _config: ProjectConfig): Map<string, string> {
     const output = new Map<string, string>();
-    output.set('.cursor/rules/devworkflows.mdc', buildMdc(rules));
+
+    const filtered = filterRules(rules);
+    const grouped = groupByScope(filtered);
+
+    for (const [scope, scopeRules] of grouped) {
+      const filename = scopeToFilename(scope, 'dwf-', '.mdc');
+      const key = `.cursor/rules/${filename}`;
+      output.set(key, buildScopeMdc(scope, scopeRules));
+    }
+
     return output;
   },
 };
diff --git a/packages/cli/src/bridges/windsurf.ts b/packages/cli/src/bridges/windsurf.ts
index 9ac41c7..96177c9 100644
--- a/packages/cli/src/bridges/windsurf.ts
+++ b/packages/cli/src/bridges/windsurf.ts
@@ -1,32 +1,45 @@
-import type { DirectoryBridge, Rule, ProjectConfig } from './types.js';
+import type { DirectoryBridge, Rule, ProjectConfig, ScopeMetadata } from './types.js';
 import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js';
+import { scopeToFilename } from '../core/scope-filename.js';
 
 const WINDSURF_CHAR_LIMIT = 6000;
+const GENERATED_COMMENT = '';
 
-function buildMarkdown(rules: Rule[]): string {
-  const lines: string[] = [
-    '',
-    '',
-    '# Project Rules',
-  ];
-
-  const filtered = filterRules(rules);
-  const grouped = groupByScope(filtered);
-
-  for (const [scope, scopeRules] of grouped) {
-    lines.push('', `## ${formatScopeHeading(scope)}`);
-    lines.push('');
-    for (const rule of scopeRules) {
-      const contentLines = rule.content.split('\n');
-      const first = contentLines[0];
-      if (first !== undefined) {
-        lines.push(`- ${first}`);
-      }
-      for (let i = 1; i < contentLines.length; i++) {
-        const line = contentLines[i];
-        if (line !== undefined) {
-          lines.push(`  ${line}`);
-        }
+function buildFrontmatter(metadata?:
ScopeMetadata): string { + const trigger = metadata?.trigger ?? 'always'; + const hasGlobs = metadata?.globs && metadata.globs.length > 0; + + const lines: string[] = ['---']; + lines.push(`trigger: ${trigger}`); + + if (hasGlobs && trigger === 'glob' && metadata?.globs) { + const globsStr = metadata.globs.map((g) => `"${g}"`).join(', '); + lines.push(`globs: [${globsStr}]`); + } + + lines.push('---'); + return lines.join('\n'); +} + +function buildScopeMarkdown(scope: string, rules: Rule[]): string { + const lines: string[] = []; + + const metadata = rules[0]?.metadata; + lines.push(buildFrontmatter(metadata)); + lines.push(GENERATED_COMMENT); + lines.push(`# ${formatScopeHeading(scope)}`); + lines.push(''); + + for (const rule of rules) { + const contentLines = rule.content.split('\n'); + const first = contentLines[0]; + if (first !== undefined) { + lines.push(`- ${first}`); + } + for (let i = 1; i < contentLines.length; i++) { + const line = contentLines[i]; + if (line !== undefined) { + lines.push(` ${line}`); } } } @@ -36,7 +49,7 @@ function buildMarkdown(rules: Rule[]): string { if (content.length > WINDSURF_CHAR_LIMIT) { console.warn( - `Warning: Windsurf output is ${String(content.length)} chars (limit: ${String(WINDSURF_CHAR_LIMIT)}). Windsurf may truncate the content.`, + `Warning: Windsurf file for scope "${scope}" is ${String(content.length)} chars (limit: ${String(WINDSURF_CHAR_LIMIT)}). 
Windsurf may truncate the content.`,
     );
   }
 
@@ -52,7 +65,16 @@ export const windsurfBridge: DirectoryBridge = {
 
   compile(rules: Rule[], _config: ProjectConfig): Map<string, string> {
     const output = new Map<string, string>();
-    output.set('.windsurf/rules/devworkflows.md', buildMarkdown(rules));
+
+    const filtered = filterRules(rules);
+    const grouped = groupByScope(filtered);
+
+    for (const [scope, scopeRules] of grouped) {
+      const filename = scopeToFilename(scope, 'dwf-', '.md');
+      const key = `.windsurf/rules/${filename}`;
+      output.set(key, buildScopeMarkdown(scope, scopeRules));
+    }
+
     return output;
   },
 };
diff --git a/packages/cli/tests/bridges/claude.test.ts b/packages/cli/tests/bridges/claude.test.ts
new file mode 100644
index 0000000..9b425f7
--- /dev/null
+++ b/packages/cli/tests/bridges/claude.test.ts
@@ -0,0 +1,199 @@
+import { describe, it } from 'node:test';
+import assert from 'node:assert/strict';
+import { claudeBridge } from '../../src/bridges/claude.js';
+import type { Rule, ProjectConfig } from '../../src/bridges/types.js';
+
+function makeRule(overrides: Partial<Rule> = {}): Rule {
+  return {
+    id: 'test-rule',
+    scope: 'architecture',
+    severity: 'error',
+    content: 'Test content',
+    enabled: true,
+    ...overrides,
+  };
+}
+
+const CONFIG: ProjectConfig = {
+  version: '0.1',
+  project: { name: 'test' },
+  tools: ['claude'],
+  mode: 'copy',
+  blocks: [],
+  pulled: [],
+  assets: [],
+};
+
+describe('claudeBridge', () => {
+  it('has correct id', () => {
+    assert.equal(claudeBridge.id, 'claude');
+  });
+
+  it('has kind directory', () => {
+    assert.equal(claudeBridge.kind, 'directory');
+  });
+
+  it('has correct output directory config', () => {
+    assert.equal(claudeBridge.outputDir, '.claude/rules');
+    assert.equal(claudeBridge.filePrefix, 'dwf-');
+    assert.equal(claudeBridge.fileExtension, '.md');
+  });
+
+  it('generates one file per scope', () => {
+    const rules = [
+      makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.'
}), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Use kebab-case.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + + assert.equal(output.size, 2); + assert.ok(output.has('.claude/rules/dwf-architecture.md')); + assert.ok(output.has('.claude/rules/dwf-conventions.md')); + }); + + it('includes generated comment in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-architecture.md') ?? ''; + + assert.ok(content.includes('')); + }); + + it('includes scope heading in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Use kebab-case.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + + const archContent = output.get('.claude/rules/dwf-architecture.md') ?? ''; + assert.ok(archContent.includes('# Architecture')); + assert.ok(archContent.includes('- Use named exports.')); + + const convContent = output.get('.claude/rules/dwf-conventions.md') ?? ''; + assert.ok(convContent.includes('# Conventions')); + assert.ok(convContent.includes('- Use kebab-case.')); + }); + + it('does not include frontmatter when no metadata paths', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-architecture.md') ?? 
''; + + assert.ok(!content.includes('---')); + assert.ok(!content.includes('paths:')); + }); + + it('includes paths frontmatter when metadata has paths', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { paths: ['src/'] }, + }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-conventions.md') ?? ''; + + assert.ok(content.includes('---')); + assert.ok(content.includes('paths:')); + assert.ok(content.includes(' - "src/"')); + }); + + it('handles scope with colon (custom scope)', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'team:payments', content: 'No raw SQL.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + + assert.ok(output.has('.claude/rules/dwf-team-payments.md')); + const content = output.get('.claude/rules/dwf-team-payments.md') ?? ''; + assert.ok(content.includes('# team:payments')); + assert.ok(content.includes('- No raw SQL.')); + }); + + it('filters out info and disabled rules', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Keep this.' }), + makeRule({ id: 'rule-b', scope: 'architecture', severity: 'info', content: 'Skip info.' }), + makeRule({ id: 'rule-c', scope: 'architecture', enabled: false, content: 'Skip disabled.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-architecture.md') ?? ''; + + assert.ok(content.includes('Keep this.')); + assert.ok(!content.includes('Skip info.')); + assert.ok(!content.includes('Skip disabled.')); + }); + + it('returns empty Map when no rules are enabled', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', enabled: false, content: 'Disabled.' 
}), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + + assert.equal(output.size, 0); + }); + + it('sorts scopes: built-in first, then custom alphabetically', () => { + const rules = [ + makeRule({ id: 'rule-z', scope: 'team:payments', content: 'No raw SQL.' }), + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Kebab case.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const keys = [...output.keys()]; + + assert.equal(keys[0], '.claude/rules/dwf-architecture.md'); + assert.equal(keys[1], '.claude/rules/dwf-conventions.md'); + assert.equal(keys[2], '.claude/rules/dwf-team-payments.md'); + }); + + it('handles multiple paths in metadata', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { paths: ['src/', 'lib/'] }, + }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-conventions.md') ?? ''; + + assert.ok(content.includes(' - "src/"')); + assert.ok(content.includes(' - "lib/"')); + }); + + it('ignores empty paths array in metadata', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'architecture', + content: 'Content.', + metadata: { paths: [] }, + }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-architecture.md') ?? 
''; + + assert.ok(!content.includes('---')); + assert.ok(!content.includes('paths:')); + }); +}); diff --git a/packages/cli/tests/bridges/cursor.test.ts b/packages/cli/tests/bridges/cursor.test.ts new file mode 100644 index 0000000..4bd32d6 --- /dev/null +++ b/packages/cli/tests/bridges/cursor.test.ts @@ -0,0 +1,188 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { cursorBridge } from '../../src/bridges/cursor.js'; +import type { Rule, ProjectConfig } from '../../src/bridges/types.js'; + +function makeRule(overrides: Partial = {}): Rule { + return { + id: 'test-rule', + scope: 'architecture', + severity: 'error', + content: 'Test content', + enabled: true, + ...overrides, + }; +} + +const CONFIG: ProjectConfig = { + version: '0.1', + project: { name: 'test' }, + tools: ['cursor'], + mode: 'copy', + blocks: [], + pulled: [], + assets: [], +}; + +describe('cursorBridge', () => { + it('has correct id', () => { + assert.equal(cursorBridge.id, 'cursor'); + }); + + it('has kind directory', () => { + assert.equal(cursorBridge.kind, 'directory'); + }); + + it('has correct output directory config', () => { + assert.equal(cursorBridge.outputDir, '.cursor/rules'); + assert.equal(cursorBridge.filePrefix, 'dwf-'); + assert.equal(cursorBridge.fileExtension, '.mdc'); + }); + + it('generates one file per scope', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Use kebab-case.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + + assert.equal(output.size, 2); + assert.ok(output.has('.cursor/rules/dwf-architecture.mdc')); + assert.ok(output.has('.cursor/rules/dwf-conventions.mdc')); + }); + + it('includes generated comment in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' 
}), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-architecture.mdc') ?? ''; + + assert.ok(content.includes('')); + }); + + it('includes default frontmatter with alwaysApply: true when no globs', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-architecture.mdc') ?? ''; + + assert.ok(content.includes('---')); + assert.ok(content.includes('description: Architecture rules generated by dev-workflows')); + assert.ok(content.includes('alwaysApply: true')); + assert.ok(content.includes('globs:')); + }); + + it('includes globs frontmatter and alwaysApply: false when globs specified', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { globs: ['**/*.ts', '**/*.tsx'] }, + }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-conventions.mdc') ?? ''; + + assert.ok(content.includes('globs: ["**/*.ts", "**/*.tsx"]')); + assert.ok(content.includes('alwaysApply: false')); + }); + + it('includes scope heading in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-architecture.mdc') ?? ''; + + assert.ok(content.includes('# Architecture')); + assert.ok(content.includes('- Use named exports.')); + }); + + it('handles scope with colon (custom scope)', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'team:payments', content: 'No raw SQL.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + + assert.ok(output.has('.cursor/rules/dwf-team-payments.mdc')); + const content = output.get('.cursor/rules/dwf-team-payments.mdc') ?? 
''; + assert.ok(content.includes('# team:payments')); + assert.ok(content.includes('- No raw SQL.')); + }); + + it('filters out info and disabled rules', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Keep this.' }), + makeRule({ id: 'rule-b', scope: 'architecture', severity: 'info', content: 'Skip info.' }), + makeRule({ id: 'rule-c', scope: 'architecture', enabled: false, content: 'Skip disabled.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-architecture.mdc') ?? ''; + + assert.ok(content.includes('Keep this.')); + assert.ok(!content.includes('Skip info.')); + assert.ok(!content.includes('Skip disabled.')); + }); + + it('returns empty Map when no rules are enabled', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', enabled: false, content: 'Disabled.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + + assert.equal(output.size, 0); + }); + + it('sorts scopes: built-in first, then custom alphabetically', () => { + const rules = [ + makeRule({ id: 'rule-z', scope: 'team:payments', content: 'No raw SQL.' }), + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Kebab case.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const keys = [...output.keys()]; + + assert.equal(keys[0], '.cursor/rules/dwf-architecture.mdc'); + assert.equal(keys[1], '.cursor/rules/dwf-conventions.mdc'); + assert.equal(keys[2], '.cursor/rules/dwf-team-payments.mdc'); + }); + + it('generates correct description for custom scopes', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'team:payments', content: 'No raw SQL.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-team-payments.mdc') ?? 
''; + + assert.ok(content.includes('description: team:payments rules generated by dev-workflows')); + }); + + it('ignores empty globs array in metadata', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'architecture', + content: 'Content.', + metadata: { globs: [] }, + }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-architecture.mdc') ?? ''; + + assert.ok(content.includes('alwaysApply: true')); + }); +}); diff --git a/packages/cli/tests/bridges/windsurf.test.ts b/packages/cli/tests/bridges/windsurf.test.ts index 1db236f..c5ba326 100644 --- a/packages/cli/tests/bridges/windsurf.test.ts +++ b/packages/cli/tests/bridges/windsurf.test.ts @@ -39,55 +39,132 @@ describe('windsurfBridge', () => { assert.equal(windsurfBridge.fileExtension, '.md'); }); - it('generates correct markdown output', () => { + it('generates one file per scope', () => { const rules = [ makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), makeRule({ id: 'rule-b', scope: 'conventions', content: 'Use kebab-case.' }), ]; const output = windsurfBridge.compile(rules, CONFIG); - const content = output.get('.windsurf/rules/devworkflows.md'); - assert.ok(content); + assert.equal(output.size, 2); + assert.ok(output.has('.windsurf/rules/dwf-architecture.md')); + assert.ok(output.has('.windsurf/rules/dwf-conventions.md')); + }); + + it('includes generated comment in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const content = output.get('.windsurf/rules/dwf-architecture.md') ?? 
''; + assert.ok(content.includes('')); - assert.ok(content.includes('# Project Rules')); - assert.ok(content.includes('## Architecture')); - assert.ok(content.includes('- Use named exports.')); - assert.ok(content.includes('## Conventions')); - assert.ok(content.includes('- Use kebab-case.')); }); - it('sorts scopes: built-in first, then custom alphabetically', () => { + it('includes default frontmatter with trigger: always', () => { const rules = [ - makeRule({ id: 'rule-z', scope: 'team:payments', content: 'No raw SQL.' }), - makeRule({ id: 'rule-a', scope: 'architecture', content: 'Named exports.' }), - makeRule({ id: 'rule-c', scope: 'agent:reviewer', content: 'Review carefully.' }), - makeRule({ id: 'rule-b', scope: 'conventions', content: 'Kebab case.' }), + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const content = output.get('.windsurf/rules/dwf-architecture.md') ?? ''; + + assert.ok(content.includes('---')); + assert.ok(content.includes('trigger: always')); + }); + + it('uses metadata trigger when specified', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { trigger: 'manual' }, + }), ]; const output = windsurfBridge.compile(rules, CONFIG); - const content = output.get('.windsurf/rules/devworkflows.md') ?? ''; + const content = output.get('.windsurf/rules/dwf-conventions.md') ?? 
''; + + assert.ok(content.includes('trigger: manual')); + }); - const archIndex = content.indexOf('## Architecture'); - const convIndex = content.indexOf('## Conventions'); - const agentIndex = content.indexOf('## agent:reviewer'); - const teamIndex = content.indexOf('## team:payments'); + it('includes globs when trigger is glob and globs are provided', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { trigger: 'glob', globs: ['**/*.ts', '**/*.tsx'] }, + }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const content = output.get('.windsurf/rules/dwf-conventions.md') ?? ''; - assert.ok(archIndex < convIndex, 'Architecture should come before Conventions'); - assert.ok(convIndex < agentIndex, 'Conventions should come before agent:reviewer'); - assert.ok(agentIndex < teamIndex, 'agent:reviewer should come before team:payments'); + assert.ok(content.includes('trigger: glob')); + assert.ok(content.includes('globs: ["**/*.ts", "**/*.tsx"]')); }); - it('renders custom scopes without capitalization', () => { + it('does not include globs when trigger is always even if globs present', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { trigger: 'always', globs: ['**/*.ts'] }, + }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const content = output.get('.windsurf/rules/dwf-conventions.md') ?? ''; + + assert.ok(content.includes('trigger: always')); + assert.ok(!content.includes('globs:')); + }); + + it('includes scope heading in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const content = output.get('.windsurf/rules/dwf-architecture.md') ?? 
''; + + assert.ok(content.includes('# Architecture')); + assert.ok(content.includes('- Use named exports.')); + }); + + it('handles scope with colon (custom scope)', () => { const rules = [ makeRule({ id: 'rule-a', scope: 'team:payments', content: 'No raw SQL.' }), ]; const output = windsurfBridge.compile(rules, CONFIG); - const content = output.get('.windsurf/rules/devworkflows.md') ?? ''; - assert.ok(content.includes('## team:payments')); - assert.ok(!content.includes('## Team:payments')); + assert.ok(output.has('.windsurf/rules/dwf-team-payments.md')); + const content = output.get('.windsurf/rules/dwf-team-payments.md') ?? ''; + assert.ok(content.includes('# team:payments')); + assert.ok(content.includes('- No raw SQL.')); + }); + + it('sorts scopes: built-in first, then custom alphabetically', () => { + const rules = [ + makeRule({ id: 'rule-z', scope: 'team:payments', content: 'No raw SQL.' }), + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Named exports.' }), + makeRule({ id: 'rule-c', scope: 'agent:reviewer', content: 'Review carefully.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Kebab case.' }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const keys = [...output.keys()]; + + assert.equal(keys[0], '.windsurf/rules/dwf-architecture.md'); + assert.equal(keys[1], '.windsurf/rules/dwf-conventions.md'); + assert.equal(keys[2], '.windsurf/rules/dwf-agent-reviewer.md'); + assert.equal(keys[3], '.windsurf/rules/dwf-team-payments.md'); }); it('filters out info and disabled rules', () => { @@ -98,10 +175,20 @@ describe('windsurfBridge', () => { ]; const output = windsurfBridge.compile(rules, CONFIG); - const content = output.get('.windsurf/rules/devworkflows.md') ?? ''; + const content = output.get('.windsurf/rules/dwf-architecture.md') ?? 
''; assert.ok(content.includes('Keep this.')); assert.ok(!content.includes('Skip info.')); assert.ok(!content.includes('Skip disabled.')); }); + + it('returns empty Map when no rules are enabled', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', enabled: false, content: 'Disabled.' }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + + assert.equal(output.size, 0); + }); }); From 131e2501999652331eec2fbc650bfe515bd89f08 Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:22:50 +0200 Subject: [PATCH 06/18] test(bridges): verify Copilot and Gemini MarkerBridge behavior preserved Add type guard assertions (isMarkerBridge/isDirectoryBridge) to existing Copilot tests. Create full Gemini test suite verifying MarkerBridge type, output paths, markers, scope sorting, and filtering. --- packages/cli/tests/bridges/copilot.test.ts | 6 ++ packages/cli/tests/bridges/gemini.test.ts | 111 +++++++++++++++++++++ 2 files changed, 117 insertions(+) create mode 100644 packages/cli/tests/bridges/gemini.test.ts diff --git a/packages/cli/tests/bridges/copilot.test.ts b/packages/cli/tests/bridges/copilot.test.ts index 674f6a9..797a214 100644 --- a/packages/cli/tests/bridges/copilot.test.ts +++ b/packages/cli/tests/bridges/copilot.test.ts @@ -1,6 +1,7 @@ import { describe, it } from 'node:test'; import assert from 'node:assert/strict'; import { copilotBridge } from '../../src/bridges/copilot.js'; +import { isMarkerBridge, isDirectoryBridge } from '../../src/bridges/types.js'; import type { Rule, ProjectConfig } from '../../src/bridges/types.js'; function makeRule(overrides: Partial = {}): Rule { @@ -88,6 +89,11 @@ describe('copilotBridge', () => { assert.ok(!content.includes('## Team:payments')); }); + it('is identified as MarkerBridge by type guard', () => { + assert.equal(isMarkerBridge(copilotBridge), true); + assert.equal(isDirectoryBridge(copilotBridge), false); + }); + it('filters out info and disabled rules', () => { 
const rules = [ makeRule({ id: 'rule-a', scope: 'architecture', content: 'Keep this.' }), diff --git a/packages/cli/tests/bridges/gemini.test.ts b/packages/cli/tests/bridges/gemini.test.ts new file mode 100644 index 0000000..3b7a843 --- /dev/null +++ b/packages/cli/tests/bridges/gemini.test.ts @@ -0,0 +1,111 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { geminiBridge } from '../../src/bridges/gemini.js'; +import { isMarkerBridge, isDirectoryBridge } from '../../src/bridges/types.js'; +import type { Rule, ProjectConfig } from '../../src/bridges/types.js'; + +function makeRule(overrides: Partial = {}): Rule { + return { + id: 'test-rule', + scope: 'architecture', + severity: 'error', + content: 'Test content', + enabled: true, + ...overrides, + }; +} + +const CONFIG: ProjectConfig = { + version: '0.1', + project: { name: 'test' }, + tools: ['gemini'], + mode: 'copy', + blocks: [], + pulled: [], + assets: [], +}; + +describe('geminiBridge', () => { + it('has correct id', () => { + assert.equal(geminiBridge.id, 'gemini'); + }); + + it('has kind marker', () => { + assert.equal(geminiBridge.kind, 'marker'); + }); + + it('has correct output path', () => { + assert.deepEqual(geminiBridge.outputPaths, ['GEMINI.md']); + }); + + it('uses markers', () => { + assert.equal(geminiBridge.usesMarkers, true); + }); + + it('is identified as MarkerBridge by type guard', () => { + assert.equal(isMarkerBridge(geminiBridge), true); + assert.equal(isDirectoryBridge(geminiBridge), false); + }); + + it('generates correct markdown output', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Use kebab-case.' 
}), + ]; + + const output = geminiBridge.compile(rules, CONFIG); + const content = output.get('GEMINI.md'); + + assert.ok(content); + assert.ok(content.includes('# Project Rules')); + assert.ok(content.includes('## Architecture')); + assert.ok(content.includes('- Use named exports.')); + assert.ok(content.includes('## Conventions')); + assert.ok(content.includes('- Use kebab-case.')); + }); + + it('sorts scopes: built-in first, then custom alphabetically', () => { + const rules = [ + makeRule({ id: 'rule-z', scope: 'team:payments', content: 'No raw SQL.' }), + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Kebab case.' }), + ]; + + const output = geminiBridge.compile(rules, CONFIG); + const content = output.get('GEMINI.md') ?? ''; + + const archIndex = content.indexOf('## Architecture'); + const convIndex = content.indexOf('## Conventions'); + const teamIndex = content.indexOf('## team:payments'); + + assert.ok(archIndex < convIndex, 'Architecture should come before Conventions'); + assert.ok(convIndex < teamIndex, 'Conventions should come before team:payments'); + }); + + it('renders custom scopes without capitalization', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'team:payments', content: 'No raw SQL.' }), + ]; + + const output = geminiBridge.compile(rules, CONFIG); + const content = output.get('GEMINI.md') ?? ''; + + assert.ok(content.includes('## team:payments')); + assert.ok(!content.includes('## Team:payments')); + }); + + it('filters out info and disabled rules', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Keep this.' }), + makeRule({ id: 'rule-b', scope: 'architecture', severity: 'info', content: 'Skip info.' }), + makeRule({ id: 'rule-c', scope: 'architecture', enabled: false, content: 'Skip disabled.' }), + ]; + + const output = geminiBridge.compile(rules, CONFIG); + const content = output.get('GEMINI.md') ?? 
''; + + assert.ok(content.includes('Keep this.')); + assert.ok(!content.includes('Skip info.')); + assert.ok(!content.includes('Skip disabled.')); + }); +}); From b9b233b8ec5ced1578bdbeb914f0f3e9270c89a9 Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:22:57 +0200 Subject: [PATCH 07/18] fix(tests): update existing tests for multi-file bridge output Update compile, e2e, and output format tests to expect the new multi-file paths (.claude/rules/dwf-{scope}.md) instead of the old single-file CLAUDE.md output. --- packages/cli/tests/commands/compile.test.ts | 2 +- packages/cli/tests/e2e/cli.test.ts | 4 ++-- packages/cli/tests/ui/output.test.ts | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/cli/tests/commands/compile.test.ts b/packages/cli/tests/commands/compile.test.ts index dc5e642..007669b 100644 --- a/packages/cli/tests/commands/compile.test.ts +++ b/packages/cli/tests/commands/compile.test.ts @@ -65,7 +65,7 @@ describe('executePipeline', () => { const claudeResult = result.results.find((r) => r.bridgeId === 'claude'); assert.ok(claudeResult); assert.equal(claudeResult.success, true); - assert.ok(await fileExists(join(tmpDir, 'CLAUDE.md'))); + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); const cursorResult = result.results.find((r) => r.bridgeId === 'cursor'); assert.ok(cursorResult); diff --git a/packages/cli/tests/e2e/cli.test.ts b/packages/cli/tests/e2e/cli.test.ts index 1663341..75cf3f6 100644 --- a/packages/cli/tests/e2e/cli.test.ts +++ b/packages/cli/tests/e2e/cli.test.ts @@ -99,8 +99,8 @@ rules: assert.equal(result.exitCode, 0); assert.ok(result.stdout.includes('Compiled')); - const claudeMd = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); - assert.ok(claudeMd.includes('# Project Rules')); + const claudeMd = await readFile(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), 'utf-8'); + assert.ok(claudeMd.includes('# Conventions')); 
assert.ok(claudeMd.includes('Always test your code.')); }); diff --git a/packages/cli/tests/ui/output.test.ts b/packages/cli/tests/ui/output.test.ts index 287f28c..8721d2c 100644 --- a/packages/cli/tests/ui/output.test.ts +++ b/packages/cli/tests/ui/output.test.ts @@ -102,7 +102,7 @@ describe('output format: compile', () => { const result = await run(['compile'], tmpDir); assert.ok(result.stdout.includes('\u203A'), 'should have bullet prefix'); - assert.ok(result.stdout.includes('CLAUDE.md'), 'should list CLAUDE.md'); + assert.ok(result.stdout.includes('.claude/rules/dwf-conventions.md'), 'should list .claude/rules/dwf-conventions.md'); }); }); From 9392891860f39d385e9b303c73be13d4ca9a9dee Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:45:15 +0200 Subject: [PATCH 08/18] fix(tests): add missing global property to ProjectConfig in test fixtures The ProjectConfig interface gained a required `global` field in task 1.1 but existing test fixtures were not updated, causing TypeScript compilation failures in the test tsconfig. 
--- packages/cli/tests/bridges/claude.test.ts | 1 + packages/cli/tests/bridges/copilot.test.ts | 1 + packages/cli/tests/bridges/cursor.test.ts | 1 + packages/cli/tests/bridges/gemini.test.ts | 1 + packages/cli/tests/bridges/windsurf.test.ts | 1 + packages/cli/tests/commands/doctor.test.ts | 6 ++++++ packages/cli/tests/core/assets.test.ts | 1 + 7 files changed, 12 insertions(+) diff --git a/packages/cli/tests/bridges/claude.test.ts b/packages/cli/tests/bridges/claude.test.ts index 9b425f7..cc289fe 100644 --- a/packages/cli/tests/bridges/claude.test.ts +++ b/packages/cli/tests/bridges/claude.test.ts @@ -22,6 +22,7 @@ const CONFIG: ProjectConfig = { blocks: [], pulled: [], assets: [], + global: true, }; describe('claudeBridge', () => { diff --git a/packages/cli/tests/bridges/copilot.test.ts b/packages/cli/tests/bridges/copilot.test.ts index 797a214..2b04333 100644 --- a/packages/cli/tests/bridges/copilot.test.ts +++ b/packages/cli/tests/bridges/copilot.test.ts @@ -23,6 +23,7 @@ const CONFIG: ProjectConfig = { blocks: [], pulled: [], assets: [], + global: true, }; describe('copilotBridge', () => { diff --git a/packages/cli/tests/bridges/cursor.test.ts b/packages/cli/tests/bridges/cursor.test.ts index 4bd32d6..66ca3c7 100644 --- a/packages/cli/tests/bridges/cursor.test.ts +++ b/packages/cli/tests/bridges/cursor.test.ts @@ -22,6 +22,7 @@ const CONFIG: ProjectConfig = { blocks: [], pulled: [], assets: [], + global: true, }; describe('cursorBridge', () => { diff --git a/packages/cli/tests/bridges/gemini.test.ts b/packages/cli/tests/bridges/gemini.test.ts index 3b7a843..02999b7 100644 --- a/packages/cli/tests/bridges/gemini.test.ts +++ b/packages/cli/tests/bridges/gemini.test.ts @@ -23,6 +23,7 @@ const CONFIG: ProjectConfig = { blocks: [], pulled: [], assets: [], + global: true, }; describe('geminiBridge', () => { diff --git a/packages/cli/tests/bridges/windsurf.test.ts b/packages/cli/tests/bridges/windsurf.test.ts index c5ba326..0327ba6 100644 --- 
a/packages/cli/tests/bridges/windsurf.test.ts +++ b/packages/cli/tests/bridges/windsurf.test.ts @@ -22,6 +22,7 @@ const CONFIG: ProjectConfig = { blocks: [], pulled: [], assets: [], + global: true, }; describe('windsurfBridge', () => { diff --git a/packages/cli/tests/commands/doctor.test.ts b/packages/cli/tests/commands/doctor.test.ts index be10ad3..c029be9 100644 --- a/packages/cli/tests/commands/doctor.test.ts +++ b/packages/cli/tests/commands/doctor.test.ts @@ -214,6 +214,7 @@ blocks: [] blocks: [], pulled: [], assets: [], + global: true, }; const result = checkBridgesAvailable(config); @@ -229,6 +230,7 @@ blocks: [] blocks: [], pulled: [], assets: [], + global: true, }; const result = checkBridgesAvailable(config); @@ -244,6 +246,7 @@ blocks: [] blocks: [], pulled: [], assets: [], + global: true, }; const result = checkBridgesAvailable(config); @@ -262,6 +265,7 @@ blocks: [] blocks: [], pulled: [], assets: [], + global: true, }; const result = await checkSymlinks(tmpDir, config); @@ -278,6 +282,7 @@ blocks: [] blocks: [], pulled: [], assets: [], + global: true, }; // Create a target file and a symlink pointing to it @@ -301,6 +306,7 @@ blocks: [] blocks: [], pulled: [], assets: [], + global: true, }; // Create a symlink pointing to a non-existent target diff --git a/packages/cli/tests/core/assets.test.ts b/packages/cli/tests/core/assets.test.ts index d2c02e5..80c290c 100644 --- a/packages/cli/tests/core/assets.test.ts +++ b/packages/cli/tests/core/assets.test.ts @@ -24,6 +24,7 @@ const CONFIG: ProjectConfig = { blocks: [], pulled: [], assets: [], + global: true, }; let tmpDir: string; From 7775e1f157a4d44a5c32af78c9c627a2110c8c5c Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:45:36 +0200 Subject: [PATCH 09/18] feat(core): add config version validation and scope metadata parsing Add validation that rejects unsupported config versions (only 0.1/0.2 accepted). Import VALID_CONFIG_VERSIONS into parser for validation. 
Scope metadata parsing was already implemented in tasks 1.1-1.9. --- packages/cli/src/core/parser.ts | 91 +++++++++++++++++++++++++++++---- packages/cli/src/core/schema.ts | 52 +++++++++++++++++++ 2 files changed, 132 insertions(+), 11 deletions(-) diff --git a/packages/cli/src/core/parser.ts b/packages/cli/src/core/parser.ts index b18cf71..1b6d6ea 100644 --- a/packages/cli/src/core/parser.ts +++ b/packages/cli/src/core/parser.ts @@ -1,9 +1,9 @@ import { readFile, readdir } from 'node:fs/promises'; import { join } from 'node:path'; import { parse } from 'yaml'; -import type { Rule, ProjectConfig, PulledEntry, AssetEntry, AssetType } from '../bridges/types.js'; +import type { Rule, ProjectConfig, PulledEntry, AssetEntry, AssetType, ScopeMetadata } from '../bridges/types.js'; import { ASSET_TYPE } from '../bridges/types.js'; -import { isValidScope } from './schema.js'; +import { isValidScope, validateScopeMetadata, VALID_CONFIG_VERSIONS } from './schema.js'; interface RawRule { id?: string; @@ -17,6 +17,10 @@ interface RawRule { interface RawRuleFile { scope?: string; + metadata?: Record; + globs?: unknown; + paths?: unknown; + trigger?: unknown; rules?: RawRule[]; } @@ -33,6 +37,11 @@ export async function readConfig(cwd: string): Promise { const version = typeof doc['version'] === 'string' ? doc['version'] : '0.1'; + const validVersions = VALID_CONFIG_VERSIONS as readonly string[]; + if (!validVersions.includes(version)) { + throw new Error(`Invalid config.yml: unsupported version "${version}". Supported versions: ${VALID_CONFIG_VERSIONS.join(', ')}`); + } + const projectRaw = doc['project']; if (!projectRaw || typeof projectRaw !== 'object') { throw new Error('Invalid config.yml: missing "project" section'); @@ -83,6 +92,9 @@ export async function readConfig(cwd: string): Promise { .filter((a) => a.name !== '' && assetTypeValues.has(a.type)) : []; + const globalRaw = doc['global']; + const global = typeof globalRaw === 'boolean' ? 
globalRaw : true; + return { version, project: { name: projectName, description: projectDescription }, @@ -91,14 +103,19 @@ export async function readConfig(cwd: string): Promise { blocks, pulled, assets, + global, }; } -function normalizeRule(raw: RawRule, scope: string): Rule | null { - if (!raw.id || !raw.content) return null; +function normalizeRule(raw: RawRule, scope: string, scopeMetadata?: ScopeMetadata): Rule | null { + if (!raw.id || !raw.content) { + return null; + } const severity = raw.severity ?? 'error'; - if (severity !== 'error' && severity !== 'warning' && severity !== 'info') return null; + if (severity !== 'error' && severity !== 'warning' && severity !== 'info') { + return null; + } const enabled = raw.enabled !== false; @@ -111,12 +128,54 @@ function normalizeRule(raw: RawRule, scope: string): Rule | null { enabled, sourceBlock: raw.sourceBlock, source: raw.source, + metadata: scopeMetadata, }; } +function extractScopeMetadata(doc: RawRuleFile, file: string): ScopeMetadata | undefined { + // Support both nested metadata block and top-level fields + const metadataRaw: Record = {}; + + if (doc.metadata && typeof doc.metadata === 'object') { + Object.assign(metadataRaw, doc.metadata); + } + + // Top-level fields take precedence over nested metadata block + if (doc.globs !== undefined) { + metadataRaw['globs'] = doc.globs; + } + if (doc.paths !== undefined) { + metadataRaw['paths'] = doc.paths; + } + if (doc.trigger !== undefined) { + metadataRaw['trigger'] = doc.trigger; + } + + if (Object.keys(metadataRaw).length === 0) { + return undefined; + } + + const { metadata, errors } = validateScopeMetadata(metadataRaw); + + for (const error of errors) { + console.warn(`Warning: ${error.field} in ${file}: ${error.message}`); + } + + if (errors.length > 0) { + return undefined; + } + + return metadata; +} + export async function readRules(cwd: string): Promise { const rulesDir = join(cwd, '.dwf', 'rules'); - const entries = await readdir(rulesDir); + let 
entries: string[]; + try { + entries = await readdir(rulesDir); + } catch { + return []; + } const ymlFiles = entries.filter((f) => f.endsWith('.yml') || f.endsWith('.yaml')); const allRules: Rule[] = []; @@ -125,22 +184,32 @@ export async function readRules(cwd: string): Promise { const raw = await readFile(join(rulesDir, file), 'utf-8'); const parsed: unknown = parse(raw); - if (!parsed || typeof parsed !== 'object') continue; + if (!parsed || typeof parsed !== 'object') { + continue; + } const doc = parsed as RawRuleFile; const scope = doc.scope ?? file.replace(/\.ya?ml$/, ''); - if (!Array.isArray(doc.rules)) continue; + if (!Array.isArray(doc.rules)) { + continue; + } if (!isValidScope(scope)) { console.warn(`Warning: invalid scope "${scope}" in ${file}, skipping rules`); continue; } + const scopeMetadata = extractScopeMetadata(doc, file); + for (const rawRule of doc.rules) { - if (!rawRule || typeof rawRule !== 'object') continue; - const rule = normalizeRule(rawRule, scope); - if (rule) allRules.push(rule); + if (!rawRule || typeof rawRule !== 'object') { + continue; + } + const rule = normalizeRule(rawRule, scope, scopeMetadata); + if (rule) { + allRules.push(rule); + } } } diff --git a/packages/cli/src/core/schema.ts b/packages/cli/src/core/schema.ts index dce920e..0b7dc6f 100644 --- a/packages/cli/src/core/schema.ts +++ b/packages/cli/src/core/schema.ts @@ -1,3 +1,5 @@ +import type { ScopeMetadata } from '../bridges/types.js'; + export const SCOPE_REGEX = /^[a-z][a-z0-9]*(?::[a-z][a-z0-9-]*)?$/; export const BUILTIN_SCOPES = ['architecture', 'conventions', 'security', 'workflow', 'testing'] as const; @@ -6,6 +8,11 @@ export type BuiltinScope = (typeof BUILTIN_SCOPES)[number]; export const VALID_TOOL_IDS = ['claude', 'cursor', 'gemini', 'windsurf', 'copilot'] as const; export type ValidToolId = (typeof VALID_TOOL_IDS)[number]; +export const VALID_TRIGGERS = ['always', 'glob', 'manual'] as const; +export type ValidTrigger = (typeof VALID_TRIGGERS)[number]; 
+ +export const VALID_CONFIG_VERSIONS = ['0.1', '0.2'] as const; + export function isValidScope(scope: string): boolean { return SCOPE_REGEX.test(scope); } @@ -13,3 +20,48 @@ export function isValidScope(scope: string): boolean { export function isBuiltinScope(scope: string): scope is BuiltinScope { return (BUILTIN_SCOPES as readonly string[]).includes(scope); } + +export function isValidTrigger(value: string): value is ValidTrigger { + return (VALID_TRIGGERS as readonly string[]).includes(value); +} + +export interface ScopeMetadataValidationError { + field: string; + message: string; +} + +export function validateScopeMetadata(raw: Record): { metadata: ScopeMetadata | undefined; errors: ScopeMetadataValidationError[] } { + const errors: ScopeMetadataValidationError[] = []; + const metadata: ScopeMetadata = {}; + let hasMetadata = false; + + if ('globs' in raw && raw['globs'] !== undefined) { + if (!Array.isArray(raw['globs']) || !raw['globs'].every((g): g is string => typeof g === 'string')) { + errors.push({ field: 'globs', message: 'globs must be an array of strings' }); + } else { + metadata.globs = raw['globs']; + hasMetadata = true; + } + } + + if ('paths' in raw && raw['paths'] !== undefined) { + if (!Array.isArray(raw['paths']) || !raw['paths'].every((p): p is string => typeof p === 'string')) { + errors.push({ field: 'paths', message: 'paths must be an array of strings' }); + } else { + metadata.paths = raw['paths']; + hasMetadata = true; + } + } + + if ('trigger' in raw && raw['trigger'] !== undefined) { + const triggerVal = String(raw['trigger']); + if (!isValidTrigger(triggerVal)) { + errors.push({ field: 'trigger', message: `trigger must be one of: ${VALID_TRIGGERS.join(', ')}. Got "${triggerVal}"` }); + } else { + metadata.trigger = triggerVal; + hasMetadata = true; + } + } + + return { metadata: hasMetadata ? 
metadata : undefined, errors }; +} From 83d6ef652a66180c73bf0acf2601c7b023327ceb Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:46:00 +0200 Subject: [PATCH 10/18] feat(compile): integrate DirectoryBridge/MarkerBridge pipeline with stale cleanup and legacy migration Rewrite executePipeline to discriminate between DirectoryBridge and MarkerBridge flows. DirectoryBridges write one file per scope then run cleanStaleFiles to remove orphaned dwf-* files. MarkerBridges continue using marker-based merge. Legacy migration runs once before writing new files. Dry-run shows summary of what would be generated. --- packages/cli/src/commands/compile.ts | 197 +++++++++++++++++++++------ 1 file changed, 158 insertions(+), 39 deletions(-) diff --git a/packages/cli/src/commands/compile.ts b/packages/cli/src/commands/compile.ts index e87bf5b..c8dbe93 100644 --- a/packages/cli/src/commands/compile.ts +++ b/packages/cli/src/commands/compile.ts @@ -5,14 +5,16 @@ import chalk from 'chalk'; import { readConfig, readRules } from '../core/parser.js'; import { computeRulesHash, writeHash } from '../core/hash.js'; import { deployAssets } from '../core/assets.js'; -import type { Bridge } from '../bridges/types.js'; -import { isMarkerBridge, getBridgeOutputPaths } from '../bridges/types.js'; +import type { Bridge, DirectoryBridge } from '../bridges/types.js'; +import { isDirectoryBridge, getBridgeOutputPaths } from '../bridges/types.js'; import { claudeBridge } from '../bridges/claude.js'; import { cursorBridge } from '../bridges/cursor.js'; import { geminiBridge } from '../bridges/gemini.js'; import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; import { mergeMarkedContent, removeMarkedBlock } from '../core/markers.js'; +import { cleanStaleFiles } from '../core/scope-filename.js'; +import { detectLegacyFiles, migrateLegacyFiles } from '../core/cleanup.js'; import { fileExists } from '../utils/fs.js'; import * 
as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; @@ -31,11 +33,22 @@ export interface BridgeResult { content?: string; } +export interface StaleFileResult { + bridgeId: string; + deleted: string[]; +} + +export interface MigrationResult { + actions: string[]; +} + export interface CompileResult { results: BridgeResult[]; activeRuleCount: number; assetPaths: string[]; elapsedMs: number; + staleResults: StaleFileResult[]; + migration: MigrationResult; } export interface PipelineOptions { @@ -50,6 +63,25 @@ function getBridge(id: string): Bridge | undefined { return BRIDGES.find((b) => b.id === id); } +function extractFilenameFromPath(relativePath: string): string { + const parts = relativePath.split('/'); + return parts[parts.length - 1] ?? relativePath; +} + +async function handleDirectoryBridgeCleanup( + cwd: string, + bridge: DirectoryBridge, + writtenFilenames: Set, + write: boolean, +): Promise { + if (!write) { + return []; + } + + const outputDir = join(cwd, bridge.outputDir); + return cleanStaleFiles(outputDir, bridge.filePrefix, bridge.fileExtension, writtenFilenames); +} + export async function executePipeline(options: PipelineOptions): Promise { const { cwd, tool, write = true } = options; const startTime = performance.now(); @@ -65,8 +97,19 @@ export async function executePipeline(options: PipelineOptions): Promise 0) { + const actions = await migrateLegacyFiles(cwd, legacyFiles); + migration.actions = actions; + } + } + const activeRules = rules.filter((r) => r.enabled); const results: BridgeResult[] = []; + const staleResults: StaleFileResult[] = []; for (const toolId of toolIds) { const bridge = getBridge(toolId); @@ -75,14 +118,61 @@ export async function executePipeline(options: PipelineOptions): Promise 0) { + staleResults.push({ bridgeId: bridge.id, deleted }); + } + continue; + } + + const outputs = bridge.compile(rules, config); + const writtenFilenames = new Set(); + + for (const [relativePath, content] of outputs) { + 
writtenFilenames.add(extractFilenameFromPath(relativePath)); + + if (!write) { + results.push({ bridgeId: bridge.id, outputPath: relativePath, success: true, content }); continue; } - if (isMarkerBridge(bridge)) { + const absolutePath = join(cwd, relativePath); + await mkdir(dirname(absolutePath), { recursive: true }); + + if (config.mode === 'link') { + const cachePath = join(cwd, '.dwf', '.cache', relativePath); + await mkdir(dirname(cachePath), { recursive: true }); + await writeFile(cachePath, content, 'utf-8'); + + if (await fileExists(absolutePath)) { + await unlink(absolutePath); + } + await symlink(cachePath, absolutePath); + } else { + await writeFile(absolutePath, content, 'utf-8'); + } + + results.push({ bridgeId: bridge.id, outputPath: relativePath, success: true }); + } + + // Stale file cleanup for DirectoryBridge + const deleted = await handleDirectoryBridgeCleanup(cwd, bridge, writtenFilenames, write); + if (deleted.length > 0) { + staleResults.push({ bridgeId: bridge.id, deleted }); + } + } else { + // MarkerBridge flow: merge content between markers in target file + if (activeRules.length === 0 && write) { + for (const relativePath of getBridgeOutputPaths(bridge)) { + const absolutePath = join(cwd, relativePath); + if (!(await fileExists(absolutePath))) { + continue; + } + const existing = await readFile(absolutePath, 'utf-8'); const cleaned = removeMarkedBlock(existing); if (cleaned.length === 0) { @@ -90,19 +180,15 @@ export async function executePipeline(options: PipelineOptions): Promise { @@ -185,16 +271,36 @@ export async function runCompile(options: CompileOptions): Promise { if (options.dryRun) { const result = await executePipeline({ cwd, tool: options.tool, write: false }); + + ui.newline(); + ui.info('Dry run — no files written'); + ui.newline(); + for (const br of result.results) { if (br.content !== undefined) { console.log(chalk.cyan(`--- ${br.outputPath} ---`)); console.log(br.content); } } + + // Summary of what would be generated 
+ const fileCount = result.results.filter((r) => r.success).length; + ui.newline(); + ui.info(`Would generate ${String(fileCount)} file${fileCount !== 1 ? 's' : ''} from ${String(result.activeRuleCount)} rules`); return; } const result = await executePipeline({ cwd, tool: options.tool }); + + // Show migration messages if any + if (result.migration.actions.length > 0) { + ui.newline(); + ui.info('Migrating from single-file to multi-file output...'); + for (const action of result.migration.actions) { + ui.info(` ${action}`); + } + } + const writtenPaths = result.results.filter((r) => r.success).map((r) => r.outputPath); const allPaths = [...writtenPaths, ...result.assetPaths]; @@ -202,11 +308,24 @@ export async function runCompile(options: CompileOptions): Promise { ui.success(`Compiled ${String(result.activeRuleCount)} rules ${ICONS.arrow} ${String(allPaths.length)} file${allPaths.length !== 1 ? 's' : ''} ${ui.timing(result.elapsedMs)}`); ui.newline(); - if (options.verbose && result.assetPaths.length > 0) { + if (options.verbose) { ui.list(writtenPaths); - ui.newline(); - console.log(` ${chalk.dim('Assets deployed:')}`); - ui.list(result.assetPaths); + + if (result.staleResults.length > 0) { + ui.newline(); + console.log(` ${chalk.dim('Stale files removed:')}`); + for (const stale of result.staleResults) { + for (const deleted of stale.deleted) { + ui.info(` ${stale.bridgeId}: ${deleted}`); + } + } + } + + if (result.assetPaths.length > 0) { + ui.newline(); + console.log(` ${chalk.dim('Assets deployed:')}`); + ui.list(result.assetPaths); + } } else { ui.list(allPaths); } From a114df1f48b4ca871727ffc6216669670f16590b Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:46:22 +0200 Subject: [PATCH 11/18] feat(explain): update explain command for multi-file DirectoryBridge output Show per-scope file listing for DirectoryBridges with output directory pattern. Show single output file for MarkerBridges. 
Update mode label from 'full file' to 'multi-file (one per scope)'. Windsurf char limit check now reports per-file max instead of total. --- packages/cli/src/commands/explain.ts | 65 ++++++++++++++++++++-------- 1 file changed, 47 insertions(+), 18 deletions(-) diff --git a/packages/cli/src/commands/explain.ts b/packages/cli/src/commands/explain.ts index 9809b0b..6611d24 100644 --- a/packages/cli/src/commands/explain.ts +++ b/packages/cli/src/commands/explain.ts @@ -30,7 +30,7 @@ function getModeLabel(bridge: Bridge): string { if (isMarkerBridge(bridge)) { return 'markers (BEGIN/END)'; } - return 'full file'; + return 'multi-file (one per scope)'; } function getExcludedRules(rules: Rule[]): Array<{ id: string; reason: string }> { @@ -80,22 +80,50 @@ async function runExplain(options: ExplainOptions): Promise { for (const toolId of toolIds) { const bridge = getBridge(toolId); - if (!bridge) continue; - - const bridgePaths = getBridgeOutputPaths(bridge); - const outputPath = bridgePaths[0] ?? (isDirectoryBridge(bridge) ? `${bridge.outputDir}/${bridge.filePrefix}*${bridge.fileExtension}` : toolId); + if (!bridge) { + continue; + } console.log(` ${formatSeparator(toolId)}`); ui.newline(); - ui.keyValue('Output:', outputPath); - ui.keyValue('Mode:', getModeLabel(bridge)); - const included = filterRules(rules); - const grouped = groupByScope(included); + if (isDirectoryBridge(bridge)) { + // DirectoryBridge: show output directory and file listing + const outputPattern = `${bridge.outputDir}/${bridge.filePrefix}*${bridge.fileExtension}`; + ui.keyValue('Output:', outputPattern); + ui.keyValue('Mode:', getModeLabel(bridge)); + + const included = filterRules(rules); + const grouped = groupByScope(included); + + ui.keyValue('Rules:', `${String(included.length)} included`); + + // Show files that would be generated (one per scope) + ui.newline(); + ui.keyValue('Files:', `${String(grouped.size)} scope${grouped.size !== 1 ? 
's' : ''}`); + const outputs = bridge.compile(rules, config); + for (const [filePath] of outputs) { + console.log(` ${' '.repeat(10)}${filePath}`); + } - ui.keyValue('Rules:', `${String(included.length)} included`); - for (const [scope, scopeRules] of grouped) { - console.log(` ${' '.repeat(10)}${scope}: ${String(scopeRules.length)}`); + // Show scope breakdown + for (const [scope, scopeRules] of grouped) { + console.log(` ${' '.repeat(10)} ${scope}: ${String(scopeRules.length)} rule${scopeRules.length !== 1 ? 's' : ''}`); + } + } else { + // MarkerBridge: show single output file + const bridgePaths = getBridgeOutputPaths(bridge); + const outputPath = bridgePaths[0] ?? toolId; + ui.keyValue('Output:', outputPath); + ui.keyValue('Mode:', getModeLabel(bridge)); + + const included = filterRules(rules); + const grouped = groupByScope(included); + + ui.keyValue('Rules:', `${String(included.length)} included`); + for (const [scope, scopeRules] of grouped) { + console.log(` ${' '.repeat(10)}${scope}: ${String(scopeRules.length)}`); + } } const excluded = getExcludedRules(rules); @@ -110,15 +138,16 @@ async function runExplain(options: ExplainOptions): Promise { if (bridge.id === 'windsurf') { const outputs = bridge.compile(rules, config); - let content = ''; + let maxPerFile = 0; for (const [, val] of outputs) { - content += val; + if (val.length > maxPerFile) { + maxPerFile = val.length; + } } - const charCount = content.length; - const formatted = `${String(charCount).replace(/\B(?=(\d{3})+(?!\d))/g, ',')} / ${String(WINDSURF_CHAR_LIMIT).replace(/\B(?=(\d{3})+(?!\d))/g, ',')} chars`; + const formatted = `${String(maxPerFile).replace(/\B(?=(\d{3})+(?!\d))/g, ',')} / ${String(WINDSURF_CHAR_LIMIT).replace(/\B(?=(\d{3})+(?!\d))/g, ',')} chars (per file)`; ui.newline(); - if (charCount > WINDSURF_CHAR_LIMIT) { - ui.warn(`Output size: ${formatted} (Windsurf limit)`); + if (maxPerFile > WINDSURF_CHAR_LIMIT) { + ui.warn(`Max file size: ${formatted} (Windsurf limit)`); } else { 
ui.keyValue('Size:', `${formatted} (Windsurf limit)`); } From e116db91944b69b6177893a84b9454b244b6bb46 Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:46:26 +0200 Subject: [PATCH 12/18] fix(types): make ProjectConfig.global required instead of optional The parser always provides a default value for global (true), so the type should reflect that it is always present after parsing. --- packages/cli/src/bridges/types.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cli/src/bridges/types.ts b/packages/cli/src/bridges/types.ts index 9bd96a3..a20b5f5 100644 --- a/packages/cli/src/bridges/types.ts +++ b/packages/cli/src/bridges/types.ts @@ -33,7 +33,7 @@ export interface ProjectConfig { blocks: string[]; pulled: PulledEntry[]; assets: AssetEntry[]; - global?: boolean; + global: boolean; } export const ASSET_TYPE = { From c619f88d2e54c83857ff89aac20e9f6c77e65290 Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 14:46:56 +0200 Subject: [PATCH 13/18] test: add comprehensive tests for Phase 1 tasks 1.10-1.18 - Parser tests: scope metadata parsing (top-level, nested, backward compat, invalid trigger/globs rejection, metadata propagation to rules) - Config tests: v0.1/v0.2 version handling, global field, version validation - Schema tests: validateScopeMetadata, isValidTrigger, VALID_CONFIG_VERSIONS - Compile integration tests: DirectoryBridge multi-file output, MarkerBridge output, mixed bridges, stale file cleanup, legacy migration, dry-run mode - Explain tests: multi-file mode label, per-scope file listing - E2E tests: multi-file compile, dry-run, explain multi-file paths - Output tests: updated mode label assertion --- packages/cli/tests/commands/compile.test.ts | 439 +++++++++++++++++++- packages/cli/tests/commands/explain.test.ts | 2 +- packages/cli/tests/core/parser.test.ts | 307 ++++++++++++++ packages/cli/tests/core/schema.test.ts | 112 ++++- packages/cli/tests/e2e/cli.test.ts | 79 ++++ 
packages/cli/tests/ui/output.test.ts | 2 +- 6 files changed, 924 insertions(+), 17 deletions(-) create mode 100644 packages/cli/tests/core/parser.test.ts diff --git a/packages/cli/tests/commands/compile.test.ts b/packages/cli/tests/commands/compile.test.ts index 007669b..45879ef 100644 --- a/packages/cli/tests/commands/compile.test.ts +++ b/packages/cli/tests/commands/compile.test.ts @@ -1,6 +1,6 @@ import { describe, it, beforeEach, afterEach } from 'node:test'; import assert from 'node:assert/strict'; -import { mkdtemp, mkdir, writeFile, rm, readFile, access } from 'node:fs/promises'; +import { mkdtemp, mkdir, writeFile, rm, readFile, readdir, access } from 'node:fs/promises'; import { join } from 'node:path'; import { tmpdir } from 'node:os'; import { executePipeline } from '../../src/commands/compile.js'; @@ -15,6 +15,25 @@ mode: copy blocks: [] `; +const COPILOT_CONFIG = `version: "0.1" +project: + name: "test-project" +tools: + - copilot +mode: copy +blocks: [] +`; + +const MIXED_CONFIG = `version: "0.1" +project: + name: "test-project" +tools: + - claude + - copilot +mode: copy +blocks: [] +`; + const VALID_RULES = `scope: conventions rules: - id: named-exports @@ -25,6 +44,26 @@ rules: content: Avoid barrel files. `; +const SECURITY_RULES = `scope: security +rules: + - id: no-eval + severity: error + content: Never use eval(). +`; + +const RULES_WITH_METADATA = `scope: conventions +globs: + - "**/*.ts" + - "**/*.tsx" +paths: + - "src/" +trigger: always +rules: + - id: named-exports + severity: error + content: Always use named exports. 
+`; + async function fileExists(filePath: string): Promise { try { await access(filePath); @@ -34,11 +73,13 @@ async function fileExists(filePath: string): Promise { } } -async function setupProject(tmpDir: string, config?: string, rules?: string): Promise { +async function setupProject(tmpDir: string, config?: string, ruleFiles?: Record): Promise { await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); await writeFile(join(tmpDir, '.dwf', 'config.yml'), config ?? VALID_CONFIG); - if (rules !== undefined) { - await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), rules); + if (ruleFiles) { + for (const [name, content] of Object.entries(ruleFiles)) { + await writeFile(join(tmpDir, '.dwf', 'rules', name), content); + } } } @@ -54,7 +95,7 @@ describe('executePipeline', () => { }); it('returns success results for all configured bridges', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); const result = await executePipeline({ cwd: tmpDir }); @@ -73,7 +114,7 @@ describe('executePipeline', () => { }); it('tool option filters to single bridge', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); const result = await executePipeline({ cwd: tmpDir, tool: 'claude' }); @@ -83,14 +124,14 @@ describe('executePipeline', () => { }); it('throws on invalid tool filter', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); await assert.rejects( () => executePipeline({ cwd: tmpDir, tool: 'noexiste' }), (err: Error) => { assert.ok(err.message.includes('not configured')); return true; - } + }, ); }); @@ -100,7 +141,7 @@ describe('executePipeline', () => { (err: Error) => { assert.ok(err.message.length > 0); return true; - } + }, ); }); @@ -109,12 +150,12 @@ 
describe('executePipeline', () => { await writeFile(join(tmpDir, '.dwf', 'config.yml'), ':\ninvalid: [yaml: {broken'); await assert.rejects( - () => executePipeline({ cwd: tmpDir }) + () => executePipeline({ cwd: tmpDir }), ); }); it('write: false returns content without writing files', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); const result = await executePipeline({ cwd: tmpDir, tool: 'claude', write: false }); @@ -124,11 +165,11 @@ describe('executePipeline', () => { assert.ok(claudeResult.content); assert.ok(claudeResult.content.includes('named exports')); - assert.ok(!(await fileExists(join(tmpDir, 'CLAUDE.md')))); + assert.ok(!(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md')))); }); it('writes hash file on successful compile', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); await executePipeline({ cwd: tmpDir }); @@ -139,7 +180,7 @@ describe('executePipeline', () => { }); it('does not write hash when write is false', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); await executePipeline({ cwd: tmpDir, write: false }); @@ -147,3 +188,373 @@ describe('executePipeline', () => { assert.ok(!(await fileExists(hashPath))); }); }); + +describe('executePipeline DirectoryBridge multi-file output', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-dir-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('generates multiple files for multiple scopes', async () => { + await setupProject(tmpDir, VALID_CONFIG, { + 'conventions.yml': VALID_RULES, + 'security.yml': SECURITY_RULES, + }); + + const result = await executePipeline({ 
cwd: tmpDir, tool: 'claude' }); + + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-security.md'))); + + const claudeResults = result.results.filter((r) => r.bridgeId === 'claude'); + assert.equal(claudeResults.length, 2); + }); + + it('creates output directories automatically', async () => { + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); + + // .claude/rules/ does not exist yet + assert.ok(!(await fileExists(join(tmpDir, '.claude', 'rules')))); + + await executePipeline({ cwd: tmpDir, tool: 'claude' }); + + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); + }); + + it('generates correct frontmatter with scope metadata', async () => { + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': RULES_WITH_METADATA }); + + await executePipeline({ cwd: tmpDir, tool: 'claude' }); + + const content = await readFile(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), 'utf-8'); + assert.ok(content.includes('paths:')); + assert.ok(content.includes('"src/"')); + }); +}); + +describe('executePipeline MarkerBridge output', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-marker-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('generates marker-based output for MarkerBridge', async () => { + await setupProject(tmpDir, COPILOT_CONFIG, { 'conventions.yml': VALID_RULES }); + + const result = await executePipeline({ cwd: tmpDir }); + + const copilotResult = result.results.find((r) => r.bridgeId === 'copilot'); + assert.ok(copilotResult); + assert.equal(copilotResult.success, true); + + const content = await readFile(join(tmpDir, '.github', 'copilot-instructions.md'), 'utf-8'); + assert.ok(content.includes('')); + assert.ok(content.includes('')); + assert.ok(content.includes('Always 
use named exports.')); + }); + + it('preserves existing content outside markers', async () => { + await setupProject(tmpDir, COPILOT_CONFIG, { 'conventions.yml': VALID_RULES }); + + // Pre-populate the file with user content + await mkdir(join(tmpDir, '.github'), { recursive: true }); + await writeFile( + join(tmpDir, '.github', 'copilot-instructions.md'), + '# My Custom Rules\n\nDo not touch this.\n', + 'utf-8', + ); + + await executePipeline({ cwd: tmpDir }); + + const content = await readFile(join(tmpDir, '.github', 'copilot-instructions.md'), 'utf-8'); + assert.ok(content.includes('# My Custom Rules')); + assert.ok(content.includes('Do not touch this.')); + assert.ok(content.includes('')); + }); +}); + +describe('executePipeline mixed bridges', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-mixed-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('handles both DirectoryBridge and MarkerBridge in same run', async () => { + await setupProject(tmpDir, MIXED_CONFIG, { 'conventions.yml': VALID_RULES }); + + const result = await executePipeline({ cwd: tmpDir }); + + // Claude (DirectoryBridge) should write to .claude/rules/ + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); + + // Copilot (MarkerBridge) should write to .github/copilot-instructions.md + assert.ok(await fileExists(join(tmpDir, '.github', 'copilot-instructions.md'))); + + const claudeResults = result.results.filter((r) => r.bridgeId === 'claude'); + const copilotResults = result.results.filter((r) => r.bridgeId === 'copilot'); + + assert.ok(claudeResults.length > 0); + assert.ok(copilotResults.length > 0); + assert.ok(claudeResults.every((r) => r.success)); + assert.ok(copilotResults.every((r) => r.success)); + }); +}); + +describe('executePipeline stale file cleanup', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await 
mkdtemp(join(tmpdir(), 'devw-compile-stale-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('removes orphaned dwf- files from previous compile', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + // Pre-populate stale file + await mkdir(join(tmpDir, '.claude', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.claude', 'rules', 'dwf-testing.md'), 'old content'); + + const result = await executePipeline({ cwd: tmpDir }); + + // New file should exist + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); + // Stale file should be removed + assert.ok(!(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-testing.md')))); + + // Should report stale files + assert.ok(result.staleResults.length > 0); + const claudeStale = result.staleResults.find((s) => s.bridgeId === 'claude'); + assert.ok(claudeStale); + assert.ok(claudeStale.deleted.includes('dwf-testing.md')); + }); + + it('does not touch non-dwf files', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + // Pre-populate a user file + await mkdir(join(tmpDir, '.claude', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.claude', 'rules', 'my-custom-rule.md'), 'user content'); + + await executePipeline({ cwd: tmpDir }); + + // User file should still exist + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'my-custom-rule.md'))); + const userContent = await readFile(join(tmpDir, '.claude', 'rules', 'my-custom-rule.md'), 'utf-8'); + assert.equal(userContent, 'user content'); + }); + + it('cleans all dwf- files when no active rules', async () => { + 
const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + const disabledRules = `scope: conventions +rules: + - id: named-exports + severity: error + content: Always use named exports. + enabled: false +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': disabledRules }); + + // Pre-populate old generated files + await mkdir(join(tmpDir, '.claude', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), 'old content'); + + await executePipeline({ cwd: tmpDir }); + + // Should be cleaned up + const remaining = await readdir(join(tmpDir, '.claude', 'rules')); + assert.ok(!remaining.includes('dwf-conventions.md')); + }); +}); + +describe('executePipeline legacy migration', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-legacy-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('migrates legacy files on first v2 compile', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + // Create legacy files + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'old cursor content'); + + const result = await executePipeline({ cwd: tmpDir }); + + // Legacy file should be removed + assert.ok(!(await fileExists(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc')))); + // Migration actions should be reported + assert.ok(result.migration.actions.length > 0); + }); + + it('removes legacy marker block from CLAUDE.md', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await 
setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + // Create CLAUDE.md with legacy markers + await writeFile( + join(tmpDir, 'CLAUDE.md'), + '# Notes\n\n\n# Rules\n\n\n# More', + 'utf-8', + ); + + const result = await executePipeline({ cwd: tmpDir }); + + // CLAUDE.md should still exist but without markers + const content = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); + assert.ok(content.includes('# Notes')); + assert.ok(content.includes('# More')); + assert.ok(!content.includes('BEGIN dev-workflows')); + assert.ok(result.migration.actions.length > 0); + }); + + it('migration is idempotent when no legacy files exist', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + const result = await executePipeline({ cwd: tmpDir }); + + assert.equal(result.migration.actions.length, 0); + }); + + it('does not migrate in dry-run mode', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + // Create legacy file + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'old cursor content'); + + const result = await executePipeline({ cwd: tmpDir, write: false }); + + // Legacy file should NOT be removed in dry-run + assert.ok(await fileExists(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'))); + assert.equal(result.migration.actions.length, 0); + }); +}); + +describe('executePipeline dry-run', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-dry-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + 
it('shows files for DirectoryBridge without writing', async () => { + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); + + const result = await executePipeline({ cwd: tmpDir, write: false }); + + const claudeResults = result.results.filter((r) => r.bridgeId === 'claude'); + assert.ok(claudeResults.length > 0); + for (const r of claudeResults) { + assert.ok(r.content); + assert.ok(r.outputPath.includes('.claude/rules/')); + } + + // No files should be written + assert.ok(!(await fileExists(join(tmpDir, '.claude', 'rules')))); + }); + + it('shows files for MarkerBridge without writing', async () => { + await setupProject(tmpDir, COPILOT_CONFIG, { 'conventions.yml': VALID_RULES }); + + const result = await executePipeline({ cwd: tmpDir, write: false }); + + const copilotResults = result.results.filter((r) => r.bridgeId === 'copilot'); + assert.ok(copilotResults.length > 0); + for (const r of copilotResults) { + assert.ok(r.content); + } + + assert.ok(!(await fileExists(join(tmpDir, '.github', 'copilot-instructions.md')))); + }); +}); diff --git a/packages/cli/tests/commands/explain.test.ts b/packages/cli/tests/commands/explain.test.ts index 7d6d013..421e570 100644 --- a/packages/cli/tests/commands/explain.test.ts +++ b/packages/cli/tests/commands/explain.test.ts @@ -77,7 +77,7 @@ describe('devw explain', () => { assert.ok(result.stdout.includes('claude')); assert.ok(result.stdout.includes('.claude/rules/dwf-')); assert.ok(result.stdout.includes('Rules:')); - assert.ok(result.stdout.includes('architecture:')); + assert.ok(result.stdout.includes('multi-file')); }); it('shows excluded rules with reasons', async () => { diff --git a/packages/cli/tests/core/parser.test.ts b/packages/cli/tests/core/parser.test.ts new file mode 100644 index 0000000..f92d930 --- /dev/null +++ b/packages/cli/tests/core/parser.test.ts @@ -0,0 +1,307 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import 
{ mkdtemp, rm, mkdir, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { readConfig, readRules } from '../../src/core/parser.js'; + +const BASE_CONFIG = `version: "0.1" +project: + name: test-project +tools: + - claude +mode: copy +blocks: []`; + +const CONFIG_V02 = `version: "0.2" +project: + name: test-project +tools: + - claude +mode: copy +blocks: [] +global: false`; + +async function createProject(dir: string, configYaml: string): Promise { + await mkdir(join(dir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(dir, '.dwf', 'config.yml'), configYaml, 'utf-8'); +} + +describe('readRules scope metadata parsing', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-parser-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('parses YAML with top-level metadata fields', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +globs: + - "**/*.ts" + - "**/*.tsx" +paths: + - "src/" +trigger: always +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + assert.ok(rules[0]?.metadata); + assert.deepEqual(rules[0]?.metadata?.globs, ['**/*.ts', '**/*.tsx']); + assert.deepEqual(rules[0]?.metadata?.paths, ['src/']); + assert.equal(rules[0]?.metadata?.trigger, 'always'); + }); + + it('parses YAML with nested metadata block', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +metadata: + globs: + - "**/*.ts" + trigger: glob +rules: + - id: named-exports + severity: error + content: Always use named exports. 
+`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + assert.ok(rules[0]?.metadata); + assert.deepEqual(rules[0]?.metadata?.globs, ['**/*.ts']); + assert.equal(rules[0]?.metadata?.trigger, 'glob'); + }); + + it('parses YAML without metadata block (backward compat)', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + assert.equal(rules[0]?.metadata, undefined); + }); + + it('rejects invalid trigger value with warning', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +trigger: invalid +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + // Metadata should be undefined because validation failed + assert.equal(rules[0]?.metadata, undefined); + }); + + it('rejects non-array globs with warning', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +globs: "**/*.ts" +rules: + - id: named-exports + severity: error + content: Always use named exports. 
+`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + // Metadata should be undefined because validation failed + assert.equal(rules[0]?.metadata, undefined); + }); + + it('rejects non-array paths with warning', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +paths: "src/" +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + assert.equal(rules[0]?.metadata, undefined); + }); + + it('attaches same metadata to all rules in the scope', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +globs: + - "**/*.ts" +trigger: always +rules: + - id: named-exports + severity: error + content: Always use named exports. + - id: no-barrel + severity: warning + content: Avoid barrel files. +`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 2); + assert.deepEqual(rules[0]?.metadata?.globs, ['**/*.ts']); + assert.equal(rules[0]?.metadata?.trigger, 'always'); + assert.deepEqual(rules[1]?.metadata?.globs, ['**/*.ts']); + assert.equal(rules[1]?.metadata?.trigger, 'always'); + }); + + it('top-level fields take precedence over nested metadata', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +metadata: + trigger: glob +trigger: always +rules: + - id: named-exports + severity: error + content: Always use named exports. 
+`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + assert.equal(rules[0]?.metadata?.trigger, 'always'); + }); +}); + +describe('readConfig version handling', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-config-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('reads v0.1 config with global defaulting to true', async () => { + await createProject(tmpDir, BASE_CONFIG); + + const config = await readConfig(tmpDir); + + assert.equal(config.version, '0.1'); + assert.equal(config.global, true); + }); + + it('reads v0.2 config with explicit global: false', async () => { + await createProject(tmpDir, CONFIG_V02); + + const config = await readConfig(tmpDir); + + assert.equal(config.version, '0.2'); + assert.equal(config.global, false); + }); + + it('reads v0.2 config with global: true', async () => { + const configWithGlobalTrue = `version: "0.2" +project: + name: test-project +tools: + - claude +mode: copy +blocks: [] +global: true`; + + await createProject(tmpDir, configWithGlobalTrue); + + const config = await readConfig(tmpDir); + + assert.equal(config.version, '0.2'); + assert.equal(config.global, true); + }); + + it('rejects unsupported config version', async () => { + const badConfig = `version: "99.0" +project: + name: test-project +tools: + - claude +mode: copy +blocks: []`; + + await createProject(tmpDir, badConfig); + + await assert.rejects( + () => readConfig(tmpDir), + (err: Error) => { + assert.ok(err.message.includes('unsupported version')); + assert.ok(err.message.includes('99.0')); + return true; + }, + ); + }); + + it('defaults to v0.1 when version is missing', async () => { + const noVersion = `project: + name: test-project +tools: + - claude +mode: copy +blocks: []`; + + await createProject(tmpDir, noVersion); + + const config = await readConfig(tmpDir); + assert.equal(config.version, '0.1'); 
+ }); +}); diff --git a/packages/cli/tests/core/schema.test.ts b/packages/cli/tests/core/schema.test.ts index d100a2f..a16082e 100644 --- a/packages/cli/tests/core/schema.test.ts +++ b/packages/cli/tests/core/schema.test.ts @@ -1,6 +1,15 @@ import { describe, it } from 'node:test'; import assert from 'node:assert/strict'; -import { isValidScope, isBuiltinScope, BUILTIN_SCOPES, VALID_TOOL_IDS } from '../../src/core/schema.js'; +import { + isValidScope, + isBuiltinScope, + isValidTrigger, + BUILTIN_SCOPES, + VALID_TOOL_IDS, + VALID_TRIGGERS, + VALID_CONFIG_VERSIONS, + validateScopeMetadata, +} from '../../src/core/schema.js'; describe('isValidScope', () => { it('accepts built-in scopes', () => { @@ -81,3 +90,104 @@ describe('VALID_TOOL_IDS', () => { assert.deepEqual([...VALID_TOOL_IDS], ['claude', 'cursor', 'gemini', 'windsurf', 'copilot']); }); }); + +describe('VALID_CONFIG_VERSIONS', () => { + it('contains 0.1 and 0.2', () => { + assert.deepEqual([...VALID_CONFIG_VERSIONS], ['0.1', '0.2']); + }); +}); + +describe('VALID_TRIGGERS', () => { + it('contains always, glob, and manual', () => { + assert.deepEqual([...VALID_TRIGGERS], ['always', 'glob', 'manual']); + }); +}); + +describe('isValidTrigger', () => { + it('returns true for valid triggers', () => { + assert.equal(isValidTrigger('always'), true); + assert.equal(isValidTrigger('glob'), true); + assert.equal(isValidTrigger('manual'), true); + }); + + it('returns false for invalid triggers', () => { + assert.equal(isValidTrigger('invalid'), false); + assert.equal(isValidTrigger('auto'), false); + assert.equal(isValidTrigger(''), false); + }); +}); + +describe('validateScopeMetadata', () => { + it('returns valid metadata with globs array', () => { + const result = validateScopeMetadata({ globs: ['**/*.ts', '**/*.tsx'] }); + assert.equal(result.errors.length, 0); + assert.ok(result.metadata); + assert.deepEqual(result.metadata?.globs, ['**/*.ts', '**/*.tsx']); + }); + + it('returns valid metadata with paths array', 
() => { + const result = validateScopeMetadata({ paths: ['src/', 'lib/'] }); + assert.equal(result.errors.length, 0); + assert.ok(result.metadata); + assert.deepEqual(result.metadata?.paths, ['src/', 'lib/']); + }); + + it('returns valid metadata with trigger value', () => { + const result = validateScopeMetadata({ trigger: 'always' }); + assert.equal(result.errors.length, 0); + assert.ok(result.metadata); + assert.equal(result.metadata?.trigger, 'always'); + }); + + it('returns undefined metadata for empty input', () => { + const result = validateScopeMetadata({}); + assert.equal(result.errors.length, 0); + assert.equal(result.metadata, undefined); + }); + + it('rejects non-array globs', () => { + const result = validateScopeMetadata({ globs: '**/*.ts' }); + assert.ok(result.errors.length > 0); + assert.ok(result.errors[0]?.field === 'globs'); + }); + + it('rejects non-string items in globs array', () => { + const result = validateScopeMetadata({ globs: [123, true] }); + assert.ok(result.errors.length > 0); + assert.ok(result.errors[0]?.field === 'globs'); + }); + + it('rejects non-array paths', () => { + const result = validateScopeMetadata({ paths: 'src/' }); + assert.ok(result.errors.length > 0); + assert.ok(result.errors[0]?.field === 'paths'); + }); + + it('rejects invalid trigger value', () => { + const result = validateScopeMetadata({ trigger: 'invalid' }); + assert.ok(result.errors.length > 0); + assert.ok(result.errors[0]?.field === 'trigger'); + assert.ok(result.errors[0]?.message.includes('invalid')); + }); + + it('validates all fields together', () => { + const result = validateScopeMetadata({ + globs: ['**/*.ts'], + paths: ['src/'], + trigger: 'glob', + }); + assert.equal(result.errors.length, 0); + assert.ok(result.metadata); + assert.deepEqual(result.metadata?.globs, ['**/*.ts']); + assert.deepEqual(result.metadata?.paths, ['src/']); + assert.equal(result.metadata?.trigger, 'glob'); + }); + + it('reports multiple errors at once', () => { + const 
result = validateScopeMetadata({ + globs: 'not-an-array', + trigger: 'invalid', + }); + assert.equal(result.errors.length, 2); + }); +}); diff --git a/packages/cli/tests/e2e/cli.test.ts b/packages/cli/tests/e2e/cli.test.ts index 75cf3f6..1d41fb4 100644 --- a/packages/cli/tests/e2e/cli.test.ts +++ b/packages/cli/tests/e2e/cli.test.ts @@ -256,4 +256,83 @@ rules: assert.equal(result.exitCode, 1); assert.ok(result.stderr.includes('not installed')); }); + + it('compile generates multi-file output for directory bridges', async () => { + await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); + + // Write two scope files + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'security.yml'), + `scope: security +rules: + - id: no-eval + severity: error + content: Never use eval. +`, + 'utf-8', + ); + + const result = await run(['compile'], tmpDir); + + assert.equal(result.exitCode, 0); + + // Both scope files should be generated + const convFile = await readFile(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), 'utf-8'); + assert.ok(convFile.includes('named exports')); + + const secFile = await readFile(join(tmpDir, '.claude', 'rules', 'dwf-security.md'), 'utf-8'); + assert.ok(secFile.includes('eval')); + }); + + it('compile --dry-run lists files without writing', async () => { + await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); + + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: named-exports + severity: error + content: Always use named exports. 
+`, + 'utf-8', + ); + + const result = await run(['compile', '--dry-run'], tmpDir); + + assert.equal(result.exitCode, 0); + assert.ok(result.stdout.includes('Dry run')); + assert.ok(result.stdout.includes('.claude/rules/dwf-conventions.md')); + }); + + it('explain shows multi-file output paths for directory bridges', async () => { + await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); + + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + + const result = await run(['explain'], tmpDir); + + assert.equal(result.exitCode, 0); + assert.ok(result.stdout.includes('multi-file')); + assert.ok(result.stdout.includes('.claude/rules/dwf-')); + }); }); diff --git a/packages/cli/tests/ui/output.test.ts b/packages/cli/tests/ui/output.test.ts index 8721d2c..ea2a898 100644 --- a/packages/cli/tests/ui/output.test.ts +++ b/packages/cli/tests/ui/output.test.ts @@ -239,7 +239,7 @@ describe('output format: explain', () => { const result = await run(['explain'], tmpDir); assert.ok(result.stdout.includes('markers (BEGIN/END)'), 'should show markers mode for copilot'); - assert.ok(result.stdout.includes('full file'), 'should show full file mode for cursor'); + assert.ok(result.stdout.includes('multi-file (one per scope)'), 'should show multi-file mode for cursor'); }); }); From 5dc770d5ff1f4ebe384fe4397f944eb2b60fab08 Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 15:49:15 +0200 Subject: [PATCH 14/18] feat(cli): complete phase 2 canonical distribution and hardening --- packages/cli/src/commands/compile.ts | 57 ++++- packages/cli/src/commands/doctor.ts | 255 +++++++++++++++++++- packages/cli/src/commands/list.ts | 10 +- packages/cli/src/core/canonical.ts | 63 +++++ packages/cli/tests/commands/compile.test.ts | 42 +++- packages/cli/tests/commands/doctor.test.ts | 114 +++++++++ 
packages/cli/tests/core/canonical.test.ts | 93 +++++++ packages/cli/tests/e2e/cli.test.ts | 1 + packages/cli/tests/ui/output.test.ts | 15 ++ 9 files changed, 632 insertions(+), 18 deletions(-) create mode 100644 packages/cli/src/core/canonical.ts create mode 100644 packages/cli/tests/core/canonical.test.ts diff --git a/packages/cli/src/commands/compile.ts b/packages/cli/src/commands/compile.ts index c8dbe93..a71dcb8 100644 --- a/packages/cli/src/commands/compile.ts +++ b/packages/cli/src/commands/compile.ts @@ -15,6 +15,7 @@ import { copilotBridge } from '../bridges/copilot.js'; import { mergeMarkedContent, removeMarkedBlock } from '../core/markers.js'; import { cleanStaleFiles } from '../core/scope-filename.js'; import { detectLegacyFiles, migrateLegacyFiles } from '../core/cleanup.js'; +import { buildCanonicalOutputs, writeCanonical } from '../core/canonical.js'; import { fileExists } from '../utils/fs.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; @@ -45,6 +46,8 @@ export interface MigrationResult { export interface CompileResult { results: BridgeResult[]; activeRuleCount: number; + canonicalFileCount: number; + canonicalError?: string; assetPaths: string[]; elapsedMs: number; staleResults: StaleFileResult[]; @@ -235,6 +238,35 @@ export async function executePipeline(options: PipelineOptions): Promise 0) { + for (const relativePath of errorPaths) { + results.push({ bridgeId: 'canonical', outputPath: relativePath, success: false, error: canonicalError }); + } + } else { + results.push({ bridgeId: 'canonical', outputPath: '.agents/rules/devw', success: false, error: canonicalError }); + } + } + } else { + for (const [relativePath, content] of canonicalOutputs) { + canonicalPaths.push(relativePath); + results.push({ bridgeId: 'canonical', outputPath: relativePath, success: true, content }); + } + } + let assetPaths: string[] = []; if (write) { const hash = computeRulesHash(activeRules); @@ -245,7 +277,16 @@ export async function 
executePipeline(options: PipelineOptions): Promise { @@ -286,12 +327,23 @@ export async function runCompile(options: CompileOptions): Promise { // Summary of what would be generated const fileCount = result.results.filter((r) => r.success).length; ui.newline(); - ui.info(`Would generate ${String(fileCount)} file${fileCount !== 1 ? 's' : ''} from ${String(result.activeRuleCount)} rules`); + ui.info( + `Would generate ${String(fileCount)} file${fileCount !== 1 ? 's' : ''} (${String(result.canonicalFileCount)} canonical) from ${String(result.activeRuleCount)} rules`, + ); return; } const result = await executePipeline({ cwd, tool: options.tool }); + if (options.tool) { + ui.info('Note: canonical output is always refreshed in .agents/rules/devw'); + } + + if (result.canonicalError) { + ui.warn(`Canonical write failed: ${result.canonicalError}`); + ui.warn('Tool-specific outputs were still written'); + } + // Show migration messages if any if (result.migration.actions.length > 0) { ui.newline(); @@ -306,6 +358,7 @@ export async function runCompile(options: CompileOptions): Promise { ui.newline(); ui.success(`Compiled ${String(result.activeRuleCount)} rules ${ICONS.arrow} ${String(allPaths.length)} file${allPaths.length !== 1 ? 
's' : ''} ${ui.timing(result.elapsedMs)}`); + ui.info(`Canonical files: ${String(result.canonicalFileCount)}`); ui.newline(); if (options.verbose) { diff --git a/packages/cli/src/commands/doctor.ts b/packages/cli/src/commands/doctor.ts index 4778cfa..9045d63 100644 --- a/packages/cli/src/commands/doctor.ts +++ b/packages/cli/src/commands/doctor.ts @@ -1,5 +1,5 @@ import { lstat, readFile, readdir } from 'node:fs/promises'; -import { join } from 'node:path'; +import { basename, join, relative } from 'node:path'; import type { Command } from 'commander'; import { parse } from 'yaml'; import { readConfig, readRules } from '../core/parser.js'; @@ -9,14 +9,23 @@ import { cursorBridge } from '../bridges/cursor.js'; import { geminiBridge } from '../bridges/gemini.js'; import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; -import type { Bridge, ProjectConfig, PulledEntry, AssetEntry, Rule } from '../bridges/types.js'; -import { getBridgeOutputPaths } from '../bridges/types.js'; +import type { Bridge, DirectoryBridge, ProjectConfig, PulledEntry, AssetEntry, Rule } from '../bridges/types.js'; +import { getBridgeOutputPaths, isDirectoryBridge } from '../bridges/types.js'; import { fileExists } from '../utils/fs.js'; import { isValidScope } from '../core/schema.js'; +import { buildCanonicalOutputs } from '../core/canonical.js'; +import { detectLegacyFiles } from '../core/cleanup.js'; import * as ui from '../utils/ui.js'; const BRIDGES: Bridge[] = [claudeBridge, cursorBridge, geminiBridge, windsurfBridge, copilotBridge]; const BRIDGE_IDS = new Set(BRIDGES.map((b) => b.id)); +const DIRECTORY_BRIDGE_IDS = new Set(BRIDGES.filter(isDirectoryBridge).map((bridge) => bridge.id)); + +function getConfiguredDirectoryBridges(config: ProjectConfig): DirectoryBridge[] { + return BRIDGES.filter((bridge): bridge is DirectoryBridge => { + return isDirectoryBridge(bridge) && DIRECTORY_BRIDGE_IDS.has(bridge.id) && 
config.tools.includes(bridge.id); + }); +} export interface CheckResult { passed: boolean; @@ -258,6 +267,212 @@ export async function checkHashSync(cwd: string, rules: Rule[]): Promise { + const canonicalDir = join(cwd, '.agents', 'rules', 'devw'); + + let entries: string[]; + try { + entries = await readdir(canonicalDir); + } catch { + return { + passed: false, + message: '.agents/rules/devw not found — run "devw compile"', + }; + } + + const canonicalFiles = entries.filter((entry) => entry.startsWith('dwf-') && entry.endsWith('.md')); + if (canonicalFiles.length === 0) { + return { + passed: false, + message: '.agents/rules/devw has no canonical files — run "devw compile"', + }; + } + + return { + passed: true, + message: `Canonical files exist (${String(canonicalFiles.length)} file${canonicalFiles.length === 1 ? '' : 's'})`, + }; +} + +export async function checkCanonicalSync(cwd: string, rules: Rule[], config: ProjectConfig): Promise { + const directoryBridges = getConfiguredDirectoryBridges(config); + + if (directoryBridges.length === 0) { + return { + passed: true, + message: 'Canonical sync skipped (no directory tools configured)', + skipped: true, + }; + } + + const canonicalOutputs = buildCanonicalOutputs(rules); + if (canonicalOutputs.size === 0) { + return { + passed: true, + message: 'Canonical sync skipped (no active scope outputs)', + skipped: true, + }; + } + + const mismatches: string[] = []; + let compared = 0; + + for (const bridge of directoryBridges) { + const expectedNativeFiles = new Set(); + + for (const [canonicalPath, canonicalContent] of canonicalOutputs) { + const canonicalFilename = basename(canonicalPath); + const scopeName = canonicalFilename.slice('dwf-'.length, canonicalFilename.length - '.md'.length); + const nativeFilename = `${bridge.filePrefix}${scopeName}${bridge.fileExtension}`; + expectedNativeFiles.add(nativeFilename); + + const nativePath = join(cwd, bridge.outputDir, nativeFilename); + if (!(await fileExists(nativePath))) 
{ + mismatches.push(`${bridge.id}: missing ${nativeFilename}`); + continue; + } + + const nativeRaw = await readFile(nativePath, 'utf-8'); + const normalizedNative = normalizeComparableContent(nativeRaw); + const normalizedCanonical = normalizeComparableContent(canonicalContent); + + compared += 1; + if (normalizedNative !== normalizedCanonical) { + mismatches.push(`${bridge.id}: modified ${nativeFilename}`); + } + } + + const bridgeDir = join(cwd, bridge.outputDir); + let entries: string[] = []; + try { + entries = await readdir(bridgeDir); + } catch { + entries = []; + } + + for (const entry of entries) { + if (!entry.startsWith(bridge.filePrefix) || !entry.endsWith(bridge.fileExtension)) { + continue; + } + if (!expectedNativeFiles.has(entry)) { + mismatches.push(`${bridge.id}: unexpected ${entry}`); + } + } + } + + if (mismatches.length > 0) { + return { + passed: false, + message: `Canonical/native mismatch: ${mismatches.join(', ')}`, + }; + } + + return { + passed: true, + message: `Canonical and native files are in sync (${String(compared)} files compared)`, + }; +} + +export async function checkLegacyMigration(cwd: string): Promise { + const legacyFiles = await detectLegacyFiles(cwd); + if (legacyFiles.length === 0) { + return { passed: true, message: 'No legacy v0.5/v0.6 files pending migration' }; + } + + const pending = legacyFiles.map((legacy) => relative(cwd, legacy.path)); + return { + passed: false, + message: `Legacy files still present: ${pending.join(', ')}`, + }; +} + +export async function checkNativeFrontmatter(cwd: string, config: ProjectConfig): Promise { + const directoryBridges = getConfiguredDirectoryBridges(config); + + if (directoryBridges.length === 0) { + return { + passed: true, + message: 'Frontmatter check skipped (no directory tools configured)', + skipped: true, + }; + } + + const errors: string[] = []; + let checked = 0; + + for (const bridge of directoryBridges) { + const dirPath = join(cwd, bridge.outputDir); + let entries: 
string[] = []; + try { + entries = await readdir(dirPath); + } catch { + entries = []; + } + + for (const entry of entries) { + if (!entry.startsWith(bridge.filePrefix) || !entry.endsWith(bridge.fileExtension)) { + continue; + } + + checked += 1; + const filePath = join(dirPath, entry); + const content = await readFile(filePath, 'utf-8'); + const frontmatter = extractFrontmatter(content); + const requiresFrontmatter = bridge.id === 'cursor' || bridge.id === 'windsurf'; + + if (frontmatter === null) { + if (requiresFrontmatter) { + errors.push(`${bridge.id}: missing frontmatter in ${entry}`); + } + continue; + } + + try { + const parsed = parse(frontmatter); + if (parsed === null || typeof parsed !== 'object' || Array.isArray(parsed)) { + errors.push(`${bridge.id}: invalid frontmatter object in ${entry}`); + } + } catch { + errors.push(`${bridge.id}: invalid YAML frontmatter in ${entry}`); + } + } + } + + if (errors.length > 0) { + return { + passed: false, + message: `Invalid native frontmatter: ${errors.join(', ')}`, + }; + } + + if (checked === 0) { + return { + passed: true, + message: 'Frontmatter check skipped (no native files found)', + skipped: true, + }; + } + + return { + passed: true, + message: `Native frontmatter is valid (${String(checked)} files checked)`, + }; +} + export async function runDoctor(): Promise { const cwd = process.cwd(); const startTime = performance.now(); @@ -282,11 +497,6 @@ export async function runDoctor(): Promise { const configValidResult = await checkConfigValid(cwd); results.push(configValidResult); - let config: ProjectConfig | null = null; - if (configValidResult.passed) { - config = await readConfig(cwd); - } - // Check 3: Rule files are valid YAML const rulesValidResult = await checkRulesValid(cwd); results.push(rulesValidResult); @@ -301,6 +511,8 @@ export async function runDoctor(): Promise { return; } + const config = await readConfig(cwd); + // Load rules for remaining checks let rules: Rule[] = []; try { @@ -318,26 
+530,43 @@ export async function runDoctor(): Promise { results.push(scopeResult); // Check 6: Tools have bridges - // config is guaranteed non-null here since configValidResult.passed - const bridgeResult = checkBridgesAvailable(config!); + const bridgeResult = checkBridgesAvailable(config); results.push(bridgeResult); // Check 7: Symlinks valid (conditional on mode) - const symlinkResult = await checkSymlinks(cwd, config!); + const symlinkResult = await checkSymlinks(cwd, config); results.push(symlinkResult); // Check 8: Pulled files exist - const pulledResult = await checkPulledFilesExist(cwd, config!.pulled); + const pulledResult = await checkPulledFilesExist(cwd, config.pulled); results.push(pulledResult); // Check 9: Asset files exist - const assetResult = await checkAssetFilesExist(cwd, config!.assets); + const assetResult = await checkAssetFilesExist(cwd, config.assets); results.push(assetResult); // Check 10: Hash sync (conditional on compiled files existing) const hashResult = await checkHashSync(cwd, rules); results.push(hashResult); + // Check 11: Canonical output exists (skip if no rules) + if (rules.length > 0) { + const canonicalExistsResult = await checkCanonicalExists(cwd); + results.push(canonicalExistsResult); + + // Check 12: Canonical and native outputs are synchronized + const canonicalSyncResult = await checkCanonicalSync(cwd, rules, config); + results.push(canonicalSyncResult); + } + + // Check 13: Legacy migration has no pending files + const legacyResult = await checkLegacyMigration(cwd); + results.push(legacyResult); + + // Check 14: Native files have valid frontmatter for their editor + const frontmatterResult = await checkNativeFrontmatter(cwd, config); + results.push(frontmatterResult); + // Output for (const r of results) { ui.check(r.passed, r.message, r.skipped); diff --git a/packages/cli/src/commands/list.ts b/packages/cli/src/commands/list.ts index 4c7198e..b01d736 100644 --- a/packages/cli/src/commands/list.ts +++ 
b/packages/cli/src/commands/list.ts @@ -10,6 +10,7 @@ import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; import type { Bridge } from '../bridges/types.js'; import { ASSET_TYPE, isDirectoryBridge, getBridgeOutputPaths } from '../bridges/types.js'; +import { filterRules, groupByScope } from '../core/helpers.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; @@ -70,6 +71,13 @@ async function listTools(): Promise { if (!(await ensureConfig(cwd))) return; const config = await readConfig(cwd); + let activeScopeCount = 0; + try { + const rules = await readRules(cwd); + activeScopeCount = groupByScope(filterRules(rules)).size; + } catch { + activeScopeCount = 0; + } if (config.tools.length === 0) { ui.warn('No tools configured'); @@ -83,7 +91,7 @@ async function listTools(): Promise { let outputLabel: string | undefined; if (bridge) { if (isDirectoryBridge(bridge)) { - outputLabel = `${bridge.outputDir}/${bridge.filePrefix}*${bridge.fileExtension}`; + outputLabel = `${bridge.outputDir}/${bridge.filePrefix}*${bridge.fileExtension} (${String(activeScopeCount)} file${activeScopeCount === 1 ? 
'' : 's'})`; } else { const paths = getBridgeOutputPaths(bridge); outputLabel = paths[0]; diff --git a/packages/cli/src/core/canonical.ts b/packages/cli/src/core/canonical.ts new file mode 100644 index 0000000..75de964 --- /dev/null +++ b/packages/cli/src/core/canonical.ts @@ -0,0 +1,63 @@ +import { mkdir, writeFile } from 'node:fs/promises'; +import { basename, join } from 'node:path'; +import type { Rule } from '../bridges/types.js'; +import { filterRules, formatScopeHeading, groupByScope } from './helpers.js'; +import { cleanStaleFiles, scopeToFilename } from './scope-filename.js'; + +const GENERATED_COMMENT = ''; +const CANONICAL_DIR_PARTS = ['.agents', 'rules', 'devw'] as const; +const CANONICAL_PREFIX = 'dwf-'; +const CANONICAL_EXTENSION = '.md'; + +export function buildCanonicalMarkdown(scope: string, rules: Rule[]): string { + const lines: string[] = [GENERATED_COMMENT, `# ${formatScopeHeading(scope)}`, '']; + + for (const rule of rules) { + const contentLines = rule.content.split('\n'); + const first = contentLines[0]; + if (first !== undefined) { + lines.push(`- ${first}`); + } + + for (let i = 1; i < contentLines.length; i++) { + const line = contentLines[i]; + if (line !== undefined) { + lines.push(line.length > 0 ? 
` ${line}` : ''); + } + } + } + + lines.push(''); + return lines.join('\n'); +} + +export function buildCanonicalOutputs(rules: Rule[]): Map { + const output = new Map(); + const filtered = filterRules(rules); + const grouped = groupByScope(filtered); + + for (const [scope, scopeRules] of grouped) { + const filename = scopeToFilename(scope, CANONICAL_PREFIX, CANONICAL_EXTENSION); + const relativePath = join(...CANONICAL_DIR_PARTS, filename); + output.set(relativePath, buildCanonicalMarkdown(scope, scopeRules)); + } + + return output; +} + +export async function writeCanonical(cwd: string, rulesOrOutputs: Rule[] | Map): Promise { + const canonicalDir = join(cwd, ...CANONICAL_DIR_PARTS); + await mkdir(canonicalDir, { recursive: true }); + + const outputs = rulesOrOutputs instanceof Map ? rulesOrOutputs : buildCanonicalOutputs(rulesOrOutputs); + const writtenFilenames = new Set(); + + for (const [relativePath, content] of outputs) { + const filename = basename(relativePath); + writtenFilenames.add(filename); + await writeFile(join(cwd, relativePath), content, 'utf-8'); + } + + await cleanStaleFiles(canonicalDir, CANONICAL_PREFIX, CANONICAL_EXTENSION, writtenFilenames); + return [...outputs.keys()]; +} diff --git a/packages/cli/tests/commands/compile.test.ts b/packages/cli/tests/commands/compile.test.ts index 45879ef..255754a 100644 --- a/packages/cli/tests/commands/compile.test.ts +++ b/packages/cli/tests/commands/compile.test.ts @@ -113,14 +113,34 @@ describe('executePipeline', () => { assert.equal(cursorResult.success, true); }); - it('tool option filters to single bridge', async () => { + it('tool option filters bridge but still includes canonical outputs', async () => { await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); const result = await executePipeline({ cwd: tmpDir, tool: 'claude' }); const bridgeIds = new Set(result.results.map((r) => r.bridgeId)); - assert.equal(bridgeIds.size, 1); + assert.equal(bridgeIds.size, 2); 
assert.ok(bridgeIds.has('claude')); + assert.ok(bridgeIds.has('canonical')); + }); + + it('keeps bridge outputs when canonical write fails', async () => { + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); + + await mkdir(join(tmpDir, '.agents', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.agents', 'rules', 'devw'), 'blocking file', 'utf-8'); + + const result = await executePipeline({ cwd: tmpDir, tool: 'claude' }); + + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); + assert.ok(result.canonicalError); + + const claudeResults = result.results.filter((r) => r.bridgeId === 'claude'); + const canonicalResults = result.results.filter((r) => r.bridgeId === 'canonical'); + + assert.ok(claudeResults.every((r) => r.success)); + assert.ok(canonicalResults.length > 0); + assert.ok(canonicalResults.every((r) => !r.success)); }); it('throws on invalid tool filter', async () => { @@ -210,9 +230,12 @@ describe('executePipeline DirectoryBridge multi-file output', () => { assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-security.md'))); + assert.ok(await fileExists(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-conventions.md'))); + assert.ok(await fileExists(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-security.md'))); const claudeResults = result.results.filter((r) => r.bridgeId === 'claude'); assert.equal(claudeResults.length, 2); + assert.equal(result.canonicalFileCount, 2); }); it('creates output directories automatically', async () => { @@ -234,6 +257,11 @@ describe('executePipeline DirectoryBridge multi-file output', () => { const content = await readFile(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), 'utf-8'); assert.ok(content.includes('paths:')); assert.ok(content.includes('"src/"')); + + const canonicalContent = await readFile(join(tmpDir, '.agents', 'rules', 'devw', 
'dwf-conventions.md'), 'utf-8'); + assert.ok(canonicalContent.startsWith('')); + assert.ok(!canonicalContent.startsWith('---')); + assert.ok(!canonicalContent.includes('paths:')); }); }); @@ -341,6 +369,8 @@ blocks: [] // Pre-populate stale file await mkdir(join(tmpDir, '.claude', 'rules'), { recursive: true }); await writeFile(join(tmpDir, '.claude', 'rules', 'dwf-testing.md'), 'old content'); + await mkdir(join(tmpDir, '.agents', 'rules', 'devw'), { recursive: true }); + await writeFile(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-testing.md'), 'old content'); const result = await executePipeline({ cwd: tmpDir }); @@ -348,6 +378,7 @@ blocks: [] assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); // Stale file should be removed assert.ok(!(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-testing.md')))); + assert.ok(!(await fileExists(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-testing.md')))); // Should report stale files assert.ok(result.staleResults.length > 0); @@ -534,14 +565,21 @@ describe('executePipeline dry-run', () => { const result = await executePipeline({ cwd: tmpDir, write: false }); const claudeResults = result.results.filter((r) => r.bridgeId === 'claude'); + const canonicalResults = result.results.filter((r) => r.bridgeId === 'canonical'); assert.ok(claudeResults.length > 0); + assert.ok(canonicalResults.length > 0); for (const r of claudeResults) { assert.ok(r.content); assert.ok(r.outputPath.includes('.claude/rules/')); } + for (const r of canonicalResults) { + assert.ok(r.content); + assert.ok(r.outputPath.includes('.agents/rules/devw/')); + } // No files should be written assert.ok(!(await fileExists(join(tmpDir, '.claude', 'rules')))); + assert.ok(!(await fileExists(join(tmpDir, '.agents', 'rules', 'devw')))); }); it('shows files for MarkerBridge without writing', async () => { diff --git a/packages/cli/tests/commands/doctor.test.ts b/packages/cli/tests/commands/doctor.test.ts index c029be9..e2311cf 
100644 --- a/packages/cli/tests/commands/doctor.test.ts +++ b/packages/cli/tests/commands/doctor.test.ts @@ -12,8 +12,14 @@ import { checkBridgesAvailable, checkSymlinks, checkHashSync, + checkCanonicalExists, + checkCanonicalSync, + checkLegacyMigration, + checkNativeFrontmatter, } from '../../src/commands/doctor.js'; import { computeRulesHash, writeHash } from '../../src/core/hash.js'; +import { executePipeline } from '../../src/commands/compile.js'; +import { readConfig, readRules } from '../../src/core/parser.js'; import type { Rule, ProjectConfig } from '../../src/bridges/types.js'; const VALID_CONFIG = `version: "0.1" @@ -347,6 +353,114 @@ blocks: [] assert.ok(result.message.includes('out of sync')); }); }); + + describe('checkCanonicalExists', () => { + it('fails when canonical directory does not exist', async () => { + const result = await checkCanonicalExists(tmpDir); + assert.equal(result.passed, false); + assert.ok(result.message.includes('.agents/rules/devw')); + }); + + it('passes when canonical files exist', async () => { + await mkdir(join(tmpDir, '.agents', 'rules', 'devw'), { recursive: true }); + await writeFile(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-conventions.md'), 'content', 'utf-8'); + + const result = await checkCanonicalExists(tmpDir); + assert.equal(result.passed, true); + assert.ok(result.message.includes('1 file')); + }); + }); + + describe('checkCanonicalSync', () => { + it('passes when canonical and native files are aligned', async () => { + await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), VALID_CONFIG); + await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), VALID_RULES); + + await executePipeline({ cwd: tmpDir }); + + const config = await readConfig(tmpDir); + const rules = await readRules(tmpDir); + const result = await checkCanonicalSync(tmpDir, rules, config); + + assert.equal(result.passed, true); + assert.ok(result.message.includes('in 
sync')); + }); + + it('fails when native file was manually edited', async () => { + await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), VALID_CONFIG); + await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), VALID_RULES); + + await executePipeline({ cwd: tmpDir }); + await writeFile( + join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), + '\n# Conventions\n\n- Tampered content\n', + 'utf-8', + ); + + const config = await readConfig(tmpDir); + const rules = await readRules(tmpDir); + const result = await checkCanonicalSync(tmpDir, rules, config); + + assert.equal(result.passed, false); + assert.ok(result.message.includes('Canonical/native mismatch')); + }); + }); + + describe('checkLegacyMigration', () => { + it('passes when no legacy files are present', async () => { + const result = await checkLegacyMigration(tmpDir); + assert.equal(result.passed, true); + }); + + it('fails when legacy files still exist', async () => { + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'legacy', 'utf-8'); + + const result = await checkLegacyMigration(tmpDir); + assert.equal(result.passed, false); + assert.ok(result.message.includes('devworkflows.mdc')); + }); + }); + + describe('checkNativeFrontmatter', () => { + it('passes with valid native frontmatter', async () => { + await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), VALID_CONFIG); + await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), VALID_RULES); + + await executePipeline({ cwd: tmpDir }); + + const config = await readConfig(tmpDir); + const result = await checkNativeFrontmatter(tmpDir, config); + assert.equal(result.passed, true); + }); + + it('fails with invalid YAML frontmatter', async () => { + const config: ProjectConfig = { + version: '0.1', + project: { name: 'test' 
}, + tools: ['cursor'], + mode: 'copy', + blocks: [], + pulled: [], + assets: [], + global: true, + }; + + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile( + join(tmpDir, '.cursor', 'rules', 'dwf-conventions.mdc'), + '---\ndescription: broken: yaml\n---\n\n', + 'utf-8', + ); + + const result = await checkNativeFrontmatter(tmpDir, config); + assert.equal(result.passed, false); + assert.ok(result.message.includes('invalid YAML frontmatter')); + }); + }); }); describe('computeRulesHash', () => { diff --git a/packages/cli/tests/core/canonical.test.ts b/packages/cli/tests/core/canonical.test.ts new file mode 100644 index 0000000..fc3117f --- /dev/null +++ b/packages/cli/tests/core/canonical.test.ts @@ -0,0 +1,93 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { access, mkdtemp, mkdir, readFile, rm, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { buildCanonicalMarkdown, writeCanonical } from '../../src/core/canonical.js'; +import type { Rule } from '../../src/bridges/types.js'; + +async function fileExists(path: string): Promise { + try { + await access(path); + return true; + } catch { + return false; + } +} + +function makeRule(overrides: Partial = {}): Rule { + return { + id: 'test-rule', + scope: 'conventions', + severity: 'error', + content: 'Always use named exports.', + enabled: true, + ...overrides, + }; +} + +describe('canonical writer', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-canonical-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + describe('buildCanonicalMarkdown', () => { + it('produces deterministic markdown without frontmatter', () => { + const rules = [ + makeRule({ id: 'named-exports', content: 'Always use named exports.' 
}), + makeRule({ id: 'explicit-return', content: 'Declare return types.\nNever use implicit any.' }), + ]; + + const outputA = buildCanonicalMarkdown('conventions', rules); + const outputB = buildCanonicalMarkdown('conventions', rules); + + assert.equal(outputA, outputB); + assert.ok(outputA.startsWith('')); + assert.ok(outputA.includes('# Conventions')); + assert.ok(outputA.includes('- Always use named exports.')); + assert.ok(outputA.includes('- Declare return types.')); + assert.ok(outputA.includes(' Never use implicit any.')); + assert.ok(!outputA.includes(' \n')); + assert.ok(!outputA.startsWith('---')); + assert.ok(!outputA.includes('paths:')); + assert.ok(!outputA.includes('globs:')); + }); + }); + + describe('writeCanonical', () => { + it('writes canonical files and removes stale dwf files only', async () => { + const canonicalDir = join(tmpDir, '.agents', 'rules', 'devw'); + await mkdir(canonicalDir, { recursive: true }); + await writeFile(join(canonicalDir, 'dwf-old-scope.md'), 'stale', 'utf-8'); + await writeFile(join(canonicalDir, 'my-custom-notes.md'), 'keep me', 'utf-8'); + + const rules = [ + makeRule({ id: 'named-exports', scope: 'conventions' }), + makeRule({ id: 'no-eval', scope: 'security', content: 'Never use eval().' }), + makeRule({ id: 'info-rule', scope: 'architecture', severity: 'info', content: 'Informational only.' 
}), + ]; + + const written = await writeCanonical(tmpDir, rules); + + assert.deepEqual(written, [ + '.agents/rules/devw/dwf-conventions.md', + '.agents/rules/devw/dwf-security.md', + ]); + + assert.equal(await fileExists(join(canonicalDir, 'dwf-conventions.md')), true); + assert.equal(await fileExists(join(canonicalDir, 'dwf-security.md')), true); + assert.equal(await fileExists(join(canonicalDir, 'dwf-old-scope.md')), false); + assert.equal(await fileExists(join(canonicalDir, 'my-custom-notes.md')), true); + + const conventions = await readFile(join(canonicalDir, 'dwf-conventions.md'), 'utf-8'); + assert.ok(conventions.includes('# Conventions')); + assert.ok(!conventions.startsWith('---')); + }); + }); +}); diff --git a/packages/cli/tests/e2e/cli.test.ts b/packages/cli/tests/e2e/cli.test.ts index 1d41fb4..6252b21 100644 --- a/packages/cli/tests/e2e/cli.test.ts +++ b/packages/cli/tests/e2e/cli.test.ts @@ -175,6 +175,7 @@ rules: it('doctor passes on valid project', async () => { await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); + await run(['compile'], tmpDir); const result = await run(['doctor'], tmpDir); assert.equal(result.exitCode, 0); diff --git a/packages/cli/tests/ui/output.test.ts b/packages/cli/tests/ui/output.test.ts index ea2a898..904b903 100644 --- a/packages/cli/tests/ui/output.test.ts +++ b/packages/cli/tests/ui/output.test.ts @@ -137,6 +137,9 @@ describe('output format: doctor', () => { await writeFile(join(tmpDir, '.dwf', 'config.yml'), CONFIG_TEMPLATE(['claude'])); await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), RULES_CONVENTIONS); + // Compile first so canonical checks pass; symlink check should still be skipped in copy mode + await run(['compile'], tmpDir); + const result = await run(['doctor'], tmpDir); assert.equal(result.exitCode, 0); @@ -192,6 +195,18 @@ describe('output format: list tools', () => { assert.ok(result.stdout.includes('\u203A'), 'should have bullet prefix'); 
assert.ok(result.stdout.includes('\u2192'), 'should have arrow'); assert.ok(result.stdout.includes('.claude/rules/dwf-'), 'should show output directory pattern'); + assert.ok(result.stdout.includes('(0 files)'), 'should show per-tool multi-file count'); + }); + + it('uses singular grammar for one active scope file', async () => { + await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), CONFIG_TEMPLATE(['claude'])); + await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), RULES_CONVENTIONS); + + const result = await run(['list', 'tools'], tmpDir); + + assert.equal(result.exitCode, 0); + assert.ok(result.stdout.includes('(1 file)')); }); }); From 7c6335c223b8ea7a783e818a9547bf0632941443 Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 16:18:45 +0200 Subject: [PATCH 15/18] feat(cli): complete phase 3 global scope support --- packages/cli/src/commands/compile.ts | 132 +++++++++++++++----- packages/cli/src/commands/init.ts | 69 +++++++--- packages/cli/src/core/merge.ts | 8 ++ packages/cli/src/core/parser.ts | 30 ++++- packages/cli/tests/commands/compile.test.ts | 110 ++++++++++++++++ packages/cli/tests/commands/init.test.ts | 75 +++++++++++ packages/cli/tests/core/merge.test.ts | 55 ++++++++ 7 files changed, 432 insertions(+), 47 deletions(-) create mode 100644 packages/cli/src/core/merge.ts create mode 100644 packages/cli/tests/commands/init.test.ts create mode 100644 packages/cli/tests/core/merge.test.ts diff --git a/packages/cli/src/commands/compile.ts b/packages/cli/src/commands/compile.ts index a71dcb8..343ead1 100644 --- a/packages/cli/src/commands/compile.ts +++ b/packages/cli/src/commands/compile.ts @@ -1,11 +1,13 @@ import { mkdir, writeFile, readFile, symlink, unlink } from 'node:fs/promises'; -import { join, dirname } from 'node:path'; +import { join, dirname, basename } from 'node:path'; +import { homedir } from 'node:os'; import type { Command } from 
'commander'; import chalk from 'chalk'; -import { readConfig, readRules } from '../core/parser.js'; +import { readConfig, readConfigFromDwfDir, readRules } from '../core/parser.js'; +import { mergeRules } from '../core/merge.js'; import { computeRulesHash, writeHash } from '../core/hash.js'; import { deployAssets } from '../core/assets.js'; -import type { Bridge, DirectoryBridge } from '../bridges/types.js'; +import type { Bridge, DirectoryBridge, Rule } from '../bridges/types.js'; import { isDirectoryBridge, getBridgeOutputPaths } from '../bridges/types.js'; import { claudeBridge } from '../bridges/claude.js'; import { cursorBridge } from '../bridges/cursor.js'; @@ -46,6 +48,9 @@ export interface MigrationResult { export interface CompileResult { results: BridgeResult[]; activeRuleCount: number; + globalRuleCount: number; + projectRuleCount: number; + overriddenRuleIds: string[]; canonicalFileCount: number; canonicalError?: string; assetPaths: string[]; @@ -72,7 +77,7 @@ function extractFilenameFromPath(relativePath: string): string { } async function handleDirectoryBridgeCleanup( - cwd: string, + outputRoot: string, bridge: DirectoryBridge, writtenFilenames: Set, write: boolean, @@ -81,16 +86,51 @@ async function handleDirectoryBridgeCleanup( return []; } - const outputDir = join(cwd, bridge.outputDir); + const outputDir = join(outputRoot, bridge.outputDir); return cleanStaleFiles(outputDir, bridge.filePrefix, bridge.fileExtension, writtenFilenames); } +interface CompileContext { + configRoot: string; + outputRoot: string; + globalMode: boolean; +} + +async function resolveCompileContext(cwd: string): Promise { + const projectConfigPath = join(cwd, '.dwf', 'config.yml'); + if (await fileExists(projectConfigPath)) { + return { + configRoot: cwd, + outputRoot: cwd, + globalMode: false, + }; + } + + const inGlobalConfigDir = basename(cwd) === '.dwf'; + const globalConfigPath = join(cwd, 'config.yml'); + if (inGlobalConfigDir && await fileExists(globalConfigPath)) { 
+ return { + configRoot: cwd, + outputRoot: homedir(), + globalMode: true, + }; + } + + throw new Error('.dwf/config.yml not found. Run devw init to initialize the project'); +} + export async function executePipeline(options: PipelineOptions): Promise { const { cwd, tool, write = true } = options; const startTime = performance.now(); + const context = await resolveCompileContext(cwd); - const config = await readConfig(cwd); - const rules = await readRules(cwd); + const config = context.globalMode ? await readConfigFromDwfDir(context.configRoot) : await readConfig(context.configRoot); + const projectRules = await readRules(context.configRoot); + const globalRules = context.globalMode || config.global === false + ? [] + : await readRules(context.configRoot, join(homedir(), '.dwf', 'rules')); + const rules = mergeRules(globalRules, projectRules); + const overriddenRuleIds = getOverriddenRuleIds(globalRules, projectRules); let toolIds = config.tools; if (tool) { @@ -103,9 +143,9 @@ export async function executePipeline(options: PipelineOptions): Promise 0) { - const actions = await migrateLegacyFiles(cwd, legacyFiles); + const actions = await migrateLegacyFiles(context.outputRoot, legacyFiles); migration.actions = actions; } } @@ -125,7 +165,7 @@ export async function executePipeline(options: PipelineOptions): Promise 0) { staleResults.push({ bridgeId: bridge.id, deleted }); } @@ -143,11 +183,11 @@ export async function executePipeline(options: PipelineOptions): Promise 0) { staleResults.push({ bridgeId: bridge.id, deleted }); } @@ -171,7 +211,7 @@ export async function executePipeline(options: PipelineOptions): Promise { const cwd = process.cwd(); - if (!(await fileExists(join(cwd, '.dwf', 'config.yml')))) { - ui.error('.dwf/config.yml not found', 'Run devw init to initialize the project'); - process.exitCode = 1; - return; - } - try { + const context = await resolveCompileContext(cwd); + if (options.verbose) { - const config = await readConfig(cwd); - const rules = 
await readRules(cwd); + const config = context.globalMode ? await readConfigFromDwfDir(context.configRoot) : await readConfig(context.configRoot); + const projectRules = await readRules(context.configRoot); + const globalRules = context.globalMode || config.global === false + ? [] + : await readRules(context.configRoot, join(homedir(), '.dwf', 'rules')); + const mergedRules = mergeRules(globalRules, projectRules); + const overriddenRuleIds = getOverriddenRuleIds(globalRules, projectRules); + ui.keyValue('Project:', chalk.bold(config.project.name)); + ui.keyValue('Scope:', context.globalMode ? 'global (~/.dwf)' : 'project (.dwf)'); ui.keyValue('Mode:', config.mode); - ui.keyValue('Rules:', String(rules.length)); + ui.keyValue('Project rules:', String(projectRules.length)); + if (config.global === false) { + ui.keyValue('Global rules:', 'disabled by config'); + } else { + ui.keyValue('Global rules:', String(globalRules.length)); + } + ui.keyValue('Merged rules:', String(mergedRules.length)); + if (overriddenRuleIds.length > 0) { + ui.keyValue('Project overrides:', String(overriddenRuleIds.length)); + } const toolIds = options.tool ? [options.tool] : config.tools; ui.keyValue('Tools:', chalk.cyan(toolIds.join(', '))); ui.newline(); @@ -359,6 +414,9 @@ export async function runCompile(options: CompileOptions): Promise { ui.newline(); ui.success(`Compiled ${String(result.activeRuleCount)} rules ${ICONS.arrow} ${String(allPaths.length)} file${allPaths.length !== 1 ? 
's' : ''} ${ui.timing(result.elapsedMs)}`); ui.info(`Canonical files: ${String(result.canonicalFileCount)}`); + if (options.verbose && result.overriddenRuleIds.length > 0) { + ui.info(`Project overrides (${String(result.overriddenRuleIds.length)}): ${result.overriddenRuleIds.join(', ')}`); + } ui.newline(); if (options.verbose) { @@ -393,6 +451,22 @@ export async function runCompileFromAdd(): Promise { await runCompile({}); } +function getOverriddenRuleIds(globalRules: Rule[], projectRules: Rule[]): string[] { + const globalIds = new Set(globalRules.map((rule) => rule.id)); + const orderedOverrides: string[] = []; + const seen = new Set(); + + for (const rule of projectRules) { + if (!globalIds.has(rule.id) || seen.has(rule.id)) { + continue; + } + seen.add(rule.id); + orderedOverrides.push(rule.id); + } + + return orderedOverrides; +} + export function registerCompileCommand(program: Command): void { program .command('compile') diff --git a/packages/cli/src/commands/init.ts b/packages/cli/src/commands/init.ts index c5ad522..113fb53 100644 --- a/packages/cli/src/commands/init.ts +++ b/packages/cli/src/commands/init.ts @@ -1,5 +1,6 @@ import { mkdir, writeFile, readFile, appendFile } from 'node:fs/promises'; import { join, basename } from 'node:path'; +import { homedir } from 'node:os'; import type { Command } from 'commander'; import { stringify } from 'yaml'; import chalk from 'chalk'; @@ -12,6 +13,7 @@ import { fileExists } from '../utils/fs.js'; export interface InitOptions { tools?: string; mode?: 'copy' | 'link'; + global?: boolean; yes?: boolean; preset?: string; } @@ -108,20 +110,36 @@ async function appendToGitignore(cwd: string): Promise { } } -async function runInit(options: InitOptions): Promise { - const cwd = process.cwd(); - const dwfDir = join(cwd, '.dwf'); +type InitScope = 'project' | 'global'; - if (await fileExists(dwfDir)) { - ui.error('.dwf/ already exists in this directory', 'Remove it first or run from a different directory'); - 
 process.exitCode = 1; -    return +async function resolveInitScope(options: InitOptions): Promise<InitScope> { + if (options.global) { + return 'global'; + } + + if (options.yes) { + return 'project'; } + return select({ + message: 'Where do you want to set up devw?', + choices: [ + { name: 'This project (.dwf/)', value: 'project' as const }, + { name: 'Global (~/.dwf/)', value: 'global' as const }, + ], + }); +} + +export async function runInit(options: InitOptions): Promise<void> { + const cwd = process.cwd(); + + let scope: InitScope; let tools: ToolId[]; let mode: 'copy' | 'link'; try { - tools = await resolveTools(options, cwd); + scope = await resolveInitScope(options); + const toolDetectRoot = scope === 'global' ? homedir() : cwd; + tools = await resolveTools(options, toolDetectRoot); mode = await resolveMode(options); } catch (err) { if (err instanceof Error && err.name === 'ExitPromptError') return; @@ -129,7 +147,20 @@ async function runInit(options: InitOptions): Promise<void> { process.exitCode = 1; return; } - const projectName = basename(cwd); + + const rootDir = scope === 'global' ? homedir() : cwd; + const dwfDir = join(rootDir, '.dwf'); + + if (await fileExists(dwfDir)) { + const locationHint = scope === 'global' + ? '~/.dwf/ already exists in your home directory' + : '.dwf/ already exists in this directory'; + ui.error(locationHint, 'Remove it first or run from a different directory'); + process.exitCode = 1; + return; + } + + const projectName = scope === 'global' ?
'global' : basename(cwd); // Create .dwf/rules/ and .dwf/assets/ const rulesDir = join(dwfDir, 'rules'); @@ -138,10 +169,11 @@ async function runInit(options: InitOptions): Promise { // Write config.yml const config = { - version: '0.1', + version: '0.2', project: { name: projectName }, tools, mode, + global: true, blocks: [] as string[], }; const configContent = `# Dev Workflows configuration\n${stringify(config)}`; @@ -152,16 +184,22 @@ async function runInit(options: InitOptions): Promise { await writeFile(join(rulesDir, `${scope}.yml`), buildRuleFileContent(scope), 'utf-8'); } - // Append .dwf/.cache/ to .gitignore - await appendToGitignore(cwd); + // Ensure canonical global output dir exists for global mode. + if (scope === 'global') { + await mkdir(join(rootDir, '.agents', 'rules', 'devw'), { recursive: true }); + } else { + // Append .dwf/.cache/ to .gitignore for project mode. + await appendToGitignore(cwd); + } // Success summary ui.newline(); ui.header('dev-workflows'); ui.newline(); - ui.success('Initialized .dwf/ successfully'); + ui.success(`Initialized ${scope === 'global' ? '~/.dwf/' : '.dwf/'} successfully`); ui.newline(); ui.keyValue('Project:', chalk.bold(projectName)); + ui.keyValue('Scope:', scope); ui.keyValue('Tools:', chalk.cyan(tools.join(', '))); ui.keyValue('Mode:', mode); ui.newline(); @@ -169,7 +207,7 @@ async function runInit(options: InitOptions): Promise { ui.newline(); console.log(` 1. Browse available rules ${chalk.cyan('devw add --list')}`); console.log(` 2. Add a rule ${chalk.cyan('devw add /')}`); - console.log(` 3. Or write your own rules in ${chalk.cyan('.dwf/rules/')}`); + console.log(` 3. Or write your own rules in ${chalk.cyan(scope === 'global' ? '~/.dwf/rules/' : '.dwf/rules/')}`); console.log(` 4. 
 When ready, compile ${chalk.cyan('devw compile')}`); if (options.preset) { @@ -187,9 +225,10 @@ async function runInit(options: InitOptions): Promise<void> { export function registerInitCommand(program: Command): void { program .command('init') - .description('Initialize .dwf/ in the current project') + .description('Initialize .dwf/ in this project or globally') .option('--tools <list>', 'Comma-separated list of tools (claude,cursor,gemini)') .option('--mode <mode>', 'Output mode: copy or link') + .option('--global', 'Initialize global config in ~/.dwf/') .option('--preset <name>', 'Install a preset after initialization (e.g., spec-driven)') .option('-y, --yes', 'Accept all defaults') .action((options: InitOptions) => runInit(options)); diff --git a/packages/cli/src/core/merge.ts new file mode 100644 index 0000000..70ebd20 --- /dev/null +++ b/packages/cli/src/core/merge.ts @@ -0,0 +1,8 @@ +import type { Rule } from '../bridges/types.js'; + +export function mergeRules(globalRules: Rule[], projectRules: Rule[]): Rule[] { + const projectRuleIds = new Set(projectRules.map((rule) => rule.id)); + const mergedGlobalRules = globalRules.filter((rule) => !projectRuleIds.has(rule.id)); + + return [...mergedGlobalRules, ...projectRules]; +} diff --git a/packages/cli/src/core/parser.ts index 1b6d6ea..7eefd32 100644 --- a/packages/cli/src/core/parser.ts +++ b/packages/cli/src/core/parser.ts @@ -25,7 +25,11 @@ interface RawRuleFile { } export async function readConfig(cwd: string): Promise<ProjectConfig> { - const configPath = join(cwd, '.dwf', 'config.yml'); + return readConfigFromDwfDir(join(cwd, '.dwf')); +} + +export async function readConfigFromDwfDir(dwfDir: string): Promise<ProjectConfig> { + const configPath = join(dwfDir, 'config.yml'); const raw = await readFile(configPath, 'utf-8'); const parsed: unknown = parse(raw); @@ -168,8 +172,12 @@ function extractScopeMetadata(doc: RawRuleFile, file: string): ScopeMetadata | undefined { return metadata; } -export async
function readRules(cwd: string): Promise { - const rulesDir = join(cwd, '.dwf', 'rules'); +export async function readRules(cwd: string, rulesPath?: string): Promise { + const rulesDir = resolveRulesDir(cwd, rulesPath); + if (!rulesDir) { + return []; + } + let entries: string[]; try { entries = await readdir(rulesDir); @@ -215,3 +223,19 @@ export async function readRules(cwd: string): Promise { return allRules; } + +function resolveRulesDir(cwd: string, rulesPath?: string): string { + if (rulesPath) { + return rulesPath; + } + + const lastSegment = cwd.split(/[\\/]/).at(-1); + if (lastSegment === '.dwf') { + return join(cwd, 'rules'); + } + if (lastSegment === 'rules') { + return cwd; + } + + return join(cwd, '.dwf', 'rules'); +} diff --git a/packages/cli/tests/commands/compile.test.ts b/packages/cli/tests/commands/compile.test.ts index 255754a..4098556 100644 --- a/packages/cli/tests/commands/compile.test.ts +++ b/packages/cli/tests/commands/compile.test.ts @@ -596,3 +596,113 @@ describe('executePipeline dry-run', () => { assert.ok(!(await fileExists(join(tmpDir, '.github', 'copilot-instructions.md')))); }); }); + +describe('executePipeline global scope integration', () => { + let tmpDir: string; + let previousHome: string | undefined; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-global-')); + previousHome = process.env['HOME']; + }); + + afterEach(async () => { + if (previousHome === undefined) { + delete process.env['HOME']; + } else { + process.env['HOME'] = previousHome; + } + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('ignores global rules when project config has global: false', async () => { + const fakeHome = join(tmpDir, 'home'); + process.env['HOME'] = fakeHome; + + const projectDir = join(tmpDir, 'project'); + await setupProject( + projectDir, + `version: "0.2" +project: + name: "test-project" +tools: + - claude +mode: copy +global: false +blocks: [] +`, + { + 'conventions.yml': `scope: 
conventions +rules: + - id: project-rule + severity: error + content: Project rule content. +`, + }, + ); + + await mkdir(join(fakeHome, '.dwf', 'rules'), { recursive: true }); + await writeFile( + join(fakeHome, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: global-rule + severity: error + content: Global rule content. +`, + 'utf-8', + ); + + const result = await executePipeline({ cwd: projectDir, tool: 'claude' }); + + assert.equal(result.globalRuleCount, 0); + assert.equal(result.projectRuleCount, 1); + assert.equal(result.activeRuleCount, 1); + + const compiled = await readFile(join(projectDir, '.claude', 'rules', 'dwf-conventions.md'), 'utf-8'); + assert.ok(compiled.includes('Project rule content.')); + assert.ok(!compiled.includes('Global rule content.')); + }); + + it('writes native and canonical outputs to home directories in global mode', async () => { + const fakeHome = join(tmpDir, 'home'); + process.env['HOME'] = fakeHome; + + const globalDwfDir = join(fakeHome, '.dwf'); + await mkdir(join(globalDwfDir, 'rules'), { recursive: true }); + await writeFile( + join(globalDwfDir, 'config.yml'), + `version: "0.2" +project: + name: "global" +tools: + - claude +mode: copy +global: true +blocks: [] +`, + 'utf-8', + ); + await writeFile( + join(globalDwfDir, 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: global-rule + severity: error + content: Home-level global rule. 
+`, + 'utf-8', + ); + + await mkdir(join(fakeHome, '.claude', 'rules'), { recursive: true }); + await writeFile(join(fakeHome, '.claude', 'rules', 'dwf-testing.md'), 'stale', 'utf-8'); + + const result = await executePipeline({ cwd: globalDwfDir, tool: 'claude' }); + + assert.ok(await fileExists(join(fakeHome, '.claude', 'rules', 'dwf-conventions.md'))); + assert.ok(await fileExists(join(fakeHome, '.agents', 'rules', 'devw', 'dwf-conventions.md'))); + assert.ok(!(await fileExists(join(globalDwfDir, '.claude', 'rules', 'dwf-conventions.md')))); + assert.ok(!(await fileExists(join(fakeHome, '.claude', 'rules', 'dwf-testing.md')))); + assert.ok(result.staleResults.some((entry) => entry.bridgeId === 'claude')); + }); +}); diff --git a/packages/cli/tests/commands/init.test.ts new file mode 100644 index 0000000..f5438ca --- /dev/null +++ b/packages/cli/tests/commands/init.test.ts @@ -0,0 +1,75 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, readFile, access, mkdir } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { runInit } from '../../src/commands/init.js'; + +async function fileExists(filePath: string): Promise<boolean> { + try { + await access(filePath); + return true; + } catch { + return false; + } +} + +describe('runInit', () => { + let tmpDir: string; + let projectDir: string; + let fakeHome: string; + let previousCwd: string; + let previousHome: string | undefined; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-init-')); + projectDir = join(tmpDir, 'project'); + fakeHome = join(tmpDir, 'home'); + previousCwd = process.cwd(); + previousHome = process.env['HOME']; + + await rm(projectDir, { recursive: true, force: true }); + await rm(fakeHome, { recursive: true, force: true }); + await Promise.all([ + mkdir(projectDir, { recursive: true }), + mkdir(fakeHome, {
recursive: true }), + ]); + + process.env['HOME'] = fakeHome; + process.chdir(projectDir); + process.exitCode = 0; + }); + + afterEach(async () => { + process.chdir(previousCwd); + if (previousHome === undefined) { + delete process.env['HOME']; + } else { + process.env['HOME'] = previousHome; + } + process.exitCode = 0; + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('initializes project mode by default with -y', async () => { + await runInit({ tools: 'claude', mode: 'copy', yes: true }); + + assert.ok(await fileExists(join(projectDir, '.dwf', 'config.yml'))); + assert.ok(await fileExists(join(projectDir, '.dwf', 'rules', 'conventions.yml'))); + assert.ok(!(await fileExists(join(fakeHome, '.dwf', 'config.yml')))); + }); + + it('initializes global mode with --global and creates canonical directory', async () => { + await runInit({ global: true, tools: 'claude', mode: 'copy', yes: true }); + + const globalConfigPath = join(fakeHome, '.dwf', 'config.yml'); + assert.ok(await fileExists(globalConfigPath)); + assert.ok(await fileExists(join(fakeHome, '.dwf', 'rules', 'conventions.yml'))); + assert.ok(await fileExists(join(fakeHome, '.agents', 'rules', 'devw'))); + assert.ok(!(await fileExists(join(projectDir, '.dwf', 'config.yml')))); + + const config = await readFile(globalConfigPath, 'utf-8'); + assert.ok(config.includes('version: "0.2"')); + assert.ok(config.includes('global: true')); + }); +}); diff --git a/packages/cli/tests/core/merge.test.ts b/packages/cli/tests/core/merge.test.ts new file mode 100644 index 0000000..c30095a --- /dev/null +++ b/packages/cli/tests/core/merge.test.ts @@ -0,0 +1,55 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import type { Rule } from '../../src/bridges/types.js'; +import { mergeRules } from '../../src/core/merge.js'; + +function makeRule(id: string, content: string): Rule { + return { + id, + scope: 'conventions', + severity: 'error', + content, + enabled: true, + }; +} + 
+describe('mergeRules', () => { + it('includes global and project rules when there are no conflicts', () => { + const globalRules = [ + makeRule('g1', 'global one'), + makeRule('g2', 'global two'), + ]; + const projectRules = [ + makeRule('p1', 'project one'), + makeRule('p2', 'project two'), + ]; + + const merged = mergeRules(globalRules, projectRules); + + assert.deepEqual(merged.map((rule) => rule.id), ['g1', 'g2', 'p1', 'p2']); + }); + + it('prefers project rules when IDs conflict', () => { + const globalRules = [ + makeRule('strict-types', 'global version'), + makeRule('g2', 'global two'), + ]; + const projectRules = [ + makeRule('strict-types', 'project version'), + makeRule('p2', 'project two'), + ]; + + const merged = mergeRules(globalRules, projectRules); + + assert.equal(merged.length, 3); + const strictTypes = merged.find((rule) => rule.id === 'strict-types'); + assert.ok(strictTypes); + assert.equal(strictTypes.content, 'project version'); + }); + + it('handles empty arrays', () => { + assert.deepEqual(mergeRules([], []), []); + assert.deepEqual(mergeRules([makeRule('g1', 'global')], []).map((rule) => rule.id), ['g1']); + assert.deepEqual(mergeRules([], [makeRule('p1', 'project')]).map((rule) => rule.id), ['p1']); + }); +}); From 9d9738d47cbc5eccd54fc3ed7b59b06a51ce66bc Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 17:21:21 +0200 Subject: [PATCH 16/18] feat(cli): complete phase 4 visual overhaul --- packages/cli/package.json | 4 +- packages/cli/src/commands/add.ts | 121 ++++--- packages/cli/src/commands/compile.ts | 46 ++- packages/cli/src/commands/explain.ts | 4 +- packages/cli/src/commands/init.ts | 94 +++-- packages/cli/src/commands/list.ts | 20 +- packages/cli/src/commands/menu.ts | 84 ++--- packages/cli/src/commands/remove.ts | 36 +- packages/cli/src/commands/watch.ts | 4 +- packages/cli/src/index.ts | 10 + packages/cli/src/utils/banner.ts | 34 ++ packages/cli/src/utils/prompt.ts | 178 +++++++++ 
packages/cli/src/utils/table.ts | 59 +++ packages/cli/src/utils/ui.ts | 36 +- packages/cli/tests/commands/menu.test.ts | 27 ++ packages/cli/tests/ui/output.test.ts | 36 +- packages/cli/tests/utils/banner.test.ts | 47 +++ .../cli/tests/utils/legacy-imports.test.ts | 45 +++ packages/cli/tests/utils/prompt.test.ts | 150 ++++++++ packages/cli/tests/utils/table.test.ts | 36 ++ pnpm-lock.yaml | 341 ++---------------- 21 files changed, 912 insertions(+), 500 deletions(-) create mode 100644 packages/cli/src/utils/banner.ts create mode 100644 packages/cli/src/utils/prompt.ts create mode 100644 packages/cli/src/utils/table.ts create mode 100644 packages/cli/tests/utils/banner.test.ts create mode 100644 packages/cli/tests/utils/legacy-imports.test.ts create mode 100644 packages/cli/tests/utils/prompt.test.ts create mode 100644 packages/cli/tests/utils/table.test.ts diff --git a/packages/cli/package.json b/packages/cli/package.json index 1548aaf..804b313 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -52,11 +52,11 @@ "test:e2e": "tsc && tsc -p tsconfig.test.json && find .test-build/tests/e2e -name '*.test.js' -exec node --test {} +" }, "dependencies": { - "@inquirer/prompts": "^7.0.0", + "@clack/prompts": "^0.9.0", "chokidar": "^3.6.0", "commander": "^13.0.0", "yaml": "^2.7.0", - "chalk": "^5.4.0" + "picocolors": "^1.1.0" }, "devDependencies": { "typescript": "^5.7.0", diff --git a/packages/cli/src/commands/add.ts b/packages/cli/src/commands/add.ts index bbc75ad..0fee685 100644 --- a/packages/cli/src/commands/add.ts +++ b/packages/cli/src/commands/add.ts @@ -1,14 +1,22 @@ import { join } from 'node:path'; import { readFile, writeFile, mkdir } from 'node:fs/promises'; import type { Command } from 'commander'; -import chalk from 'chalk'; +import pc from 'picocolors'; import { stringify, parse } from 'yaml'; -import { select, checkbox, confirm } from '@inquirer/prompts'; import { fetchRawContent, fetchContent, listDirectory, listContentDirectory } from 
'../utils/github.js'; import { convert } from '../core/converter.js'; import { isAssetType, parseAssetFrontmatter } from '../core/assets.js'; import { fileExists } from '../utils/fs.js'; import { readConfig } from '../core/parser.js'; +import { + selectPrompt, + multiselectPrompt, + confirmPrompt, + introPrompt, + outroPrompt, + spinnerTask, + isInteractiveSession, +} from '../utils/prompt.js'; import * as cache from '../utils/cache.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; @@ -58,7 +66,10 @@ export async function fetchRegistry(cwd: string): Promise let topLevel; try { - topLevel = await listDirectory(); + topLevel = await spinnerTask({ + label: 'Fetching rule categories', + task: async () => listDirectory(), + }); } catch (err) { const msg = err instanceof Error ? err.message : String(err); ui.error(`Could not fetch rule registry: ${msg}`); @@ -129,15 +140,15 @@ async function runList(categoryFilter: string | undefined): Promise { ui.newline(); for (const category of displayCategories) { - console.log(` ${chalk.cyan(`${category.name}/`)}`); + console.log(` ${pc.cyan(`${category.name}/`)}`); for (const rule of category.rules) { - const desc = rule.description ? chalk.dim(` ${rule.description}`) : ''; - console.log(` ${chalk.white(rule.name.padEnd(20))}${desc}`); + const desc = rule.description ? 
pc.dim(` ${rule.description}`) : ''; + console.log(` ${pc.white(rule.name.padEnd(20))}${desc}`); } ui.newline(); } - console.log(` ${chalk.dim(`Add a rule: devw add /`)}`); + console.log(` ${pc.dim(`Add a rule: devw add /`)}`); // Show available assets if not filtering by category if (!categoryFilter) { @@ -161,13 +172,13 @@ async function runList(categoryFilter: string | undefined): Promise { const names = result.value.filter((e) => e.type === 'file').map((e) => e.name); if (names.length === 0) continue; const singular = type.replace(/s$/, ''); - console.log(` ${chalk.cyan(`${singular}/`)}`); + console.log(` ${pc.cyan(`${singular}/`)}`); for (const name of names) { - console.log(` ${chalk.white(name)}`); + console.log(` ${pc.white(name)}`); } ui.newline(); } - console.log(` ${chalk.dim(`Add an asset: devw add command/`)}`); + console.log(` ${pc.dim(`Add an asset: devw add command/`)}`); } } } @@ -265,7 +276,10 @@ export async function downloadAndInstallAsset( let content: string; try { - content = await fetchContent(getAssetContentPath(type, name)); + content = await spinnerTask({ + label: `Fetching ${source}`, + task: async () => fetchContent(getAssetContentPath(type, name)), + }); } catch (err) { const msg = err instanceof Error ? 
err.message : String(err); ui.error(msg); @@ -290,9 +304,9 @@ export async function downloadAndInstallAsset( if (!options.force) { ui.info(`${source} already exists locally`); try { - const shouldOverwrite = await confirm({ + const shouldOverwrite = await confirmPrompt({ message: 'Overwrite?', - default: true, + defaultValue: true, }); if (!shouldOverwrite) { ui.error('Cancelled'); @@ -309,7 +323,7 @@ export async function downloadAndInstallAsset( ui.newline(); ui.header('Dry run — would write:'); ui.newline(); - console.log(chalk.dim(` .dwf/assets/${type}s/${fileName}`)); + console.log(pc.dim(` .dwf/assets/${type}s/${fileName}`)); return false; } @@ -342,7 +356,10 @@ async function downloadAndInstall( let markdown: string; try { - markdown = await fetchRawContent(source); + markdown = await spinnerTask({ + label: `Fetching ${source}`, + task: async () => fetchRawContent(source), + }); } catch (err) { const msg = err instanceof Error ? err.message : String(err); ui.error(msg); @@ -376,10 +393,10 @@ async function downloadAndInstall( ui.newline(); ui.info(`${source} already exists locally (v${existingVersion} ${ICONS.arrow} v${result.version})`); try { - const shouldOverwrite = await confirm({ - message: 'Overwrite with new version?', - default: true, - }); + const shouldOverwrite = await confirmPrompt({ + message: 'Overwrite with new version?', + defaultValue: true, + }); if (!shouldOverwrite) { ui.error('Cancelled'); return false; @@ -401,7 +418,7 @@ async function downloadAndInstall( ui.newline(); ui.header('Dry run — would write:'); ui.newline(); - console.log(chalk.dim(` ${fileName}`)); + console.log(pc.dim(` ${fileName}`)); ui.newline(); console.log(yamlOutput); return false; @@ -422,15 +439,16 @@ async function downloadAndInstall( } async function runInteractiveAsset(cwd: string, options: AddOptions): Promise { + introPrompt('Add assets'); let assetType: AssetType | 'preset'; try { - assetType = await select({ + assetType = await selectPrompt({ message: 
'Asset type', - choices: [ - { name: 'command — Slash commands for Claude Code', value: 'command' }, - { name: 'template — Spec and document templates', value: 'template' }, - { name: 'hook — Editor hooks (auto-format, etc.)', value: 'hook' }, - { name: 'preset — Bundle of rules + assets', value: 'preset' }, + options: [ + { label: 'command — Slash commands for Claude Code', value: 'command' }, + { label: 'template — Spec and document templates', value: 'template' }, + { label: 'hook — Editor hooks (auto-format, etc.)', value: 'hook' }, + { label: 'preset — Bundle of rules + assets', value: 'preset' }, ], }); } catch { @@ -458,9 +476,9 @@ async function runInteractiveAsset(cwd: string, options: AddOptions): Promise({ + selected = await multiselectPrompt({ message: `Select ${assetType}s to install`, - choices: names.map((name) => ({ name, value: name })), + options: names.map((name) => ({ label: name, value: name })), }); } catch { ui.error('Cancelled'); @@ -487,16 +505,19 @@ async function runInteractiveAsset(cwd: string, options: AddOptions): Promise { + introPrompt('Add rules or assets'); let mode: 'rules' | 'assets'; try { - mode = await select<'rules' | 'assets'>({ + mode = await selectPrompt<'rules' | 'assets'>({ message: 'What do you want to add?', - choices: [ - { name: 'Rules — Install rules from the registry', value: 'rules' }, - { name: 'Assets — Commands, templates, hooks, presets', value: 'assets' }, + options: [ + { label: 'Rules — Install rules from the registry', value: 'rules' }, + { label: 'Assets — Commands, templates, hooks, presets', value: 'assets' }, ], }); } catch { @@ -538,15 +559,15 @@ async function runInteractive(cwd: string, options: AddOptions): Promise { ); if (availableCategories.length === 0) break; - const selectedCategoryName = await select({ + const selectedCategoryName = await selectPrompt({ message: 'Choose a category', - choices: availableCategories.map((c) => { + options: availableCategories.map((c) => { const allInstalled = 
c.rules.every((r) => installedPaths.has(`${c.name}/${r.name}`), ); const label = `${c.name} (${pluralRules(c.rules.length)})`; return { - name: allInstalled ? `${label} ${chalk.dim('(all installed)')}` : label, + label: allInstalled ? `${label} ${pc.dim('(all installed)')}` : label, value: c.name, }; }), @@ -555,17 +576,17 @@ async function runInteractive(cwd: string, options: AddOptions): Promise { const category = registry.categories.find((c) => c.name === selectedCategoryName); if (!category) break; - const selected = await checkbox({ + const selected = await multiselectPrompt({ message: 'Select rules to add', - choices: [ - { name: '\u2190 Back to categories', value: BACK_VALUE }, + options: [ + { label: '\u2190 Back to categories', value: BACK_VALUE }, ...category.rules.map((r) => { const path = `${category.name}/${r.name}`; const installed = installedPaths.has(path); const desc = r.description ? ` ${ICONS.dash} ${r.description}` : ''; - const suffix = installed ? chalk.dim(' (already installed)') : ''; + const suffix = installed ? pc.dim(' (already installed)') : ''; return { - name: `${r.name}${desc}${suffix}`, + label: `${r.name}${desc}${suffix}`, value: r.name, }; }), @@ -595,9 +616,9 @@ async function runInteractive(cwd: string, options: AddOptions): Promise { ); if (remaining.length === 0) break; - const addMore = await confirm({ + const addMore = await confirmPrompt({ message: 'Add rules from another category?', - default: true, + defaultValue: true, }); if (!addMore) break; } @@ -611,15 +632,15 @@ async function runInteractive(cwd: string, options: AddOptions): Promise { ui.newline(); ui.header('Rules to install:'); for (const rule of allSelected) { - const desc = rule.description ? chalk.dim(` ${ICONS.dash} ${rule.description}`) : ''; + const desc = rule.description ? 
pc.dim(` ${ICONS.dash} ${rule.description}`) : ''; console.log(` ${rule.category}/${rule.name}${desc}`); } ui.newline(); try { - const shouldProceed = await confirm({ + const shouldProceed = await confirmPrompt({ message: `Install ${pluralRules(allSelected.length)}?`, - default: true, + defaultValue: true, }); if (!shouldProceed) { ui.error('Cancelled'); @@ -640,6 +661,8 @@ async function runInteractive(cwd: string, options: AddOptions): Promise { const { runCompileFromAdd } = await import('./compile.js'); await runCompileFromAdd(); } + + outroPrompt('Add flow completed'); } interface PresetManifest { @@ -732,7 +755,7 @@ export async function runAdd(ruleArg: string | undefined, options: AddOptions): } if (!ruleArg) { - if (!process.stdout.isTTY || !process.stdin.isTTY) { + if (!isInteractiveSession()) { ui.error('No rule specified', 'Usage: devw add /'); process.exitCode = 1; return; @@ -742,6 +765,10 @@ export async function runAdd(ruleArg: string | undefined, options: AddOptions): return; } + if (isInteractiveSession()) { + introPrompt('Adding item'); + } + if (!ruleArg.includes('/')) { const dashIdx = ruleArg.indexOf('-'); const hint = @@ -792,6 +819,8 @@ export async function runAdd(ruleArg: string | undefined, options: AddOptions): const { runCompileFromAdd } = await import('./compile.js'); await runCompileFromAdd(); } + + outroPrompt('Add command completed'); } export function registerAddCommand(program: Command): void { diff --git a/packages/cli/src/commands/compile.ts b/packages/cli/src/commands/compile.ts index 343ead1..f206c42 100644 --- a/packages/cli/src/commands/compile.ts +++ b/packages/cli/src/commands/compile.ts @@ -2,7 +2,7 @@ import { mkdir, writeFile, readFile, symlink, unlink } from 'node:fs/promises'; import { join, dirname, basename } from 'node:path'; import { homedir } from 'node:os'; import type { Command } from 'commander'; -import chalk from 'chalk'; +import pc from 'picocolors'; import { readConfig, readConfigFromDwfDir, readRules } from 
'../core/parser.js'; import { mergeRules } from '../core/merge.js'; import { computeRulesHash, writeHash } from '../core/hash.js'; @@ -21,6 +21,7 @@ import { buildCanonicalOutputs, writeCanonical } from '../core/canonical.js'; import { fileExists } from '../utils/fs.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; +import { renderTable } from '../utils/table.js'; export interface CompileOptions { tool?: string; @@ -96,6 +97,32 @@ interface CompileContext { globalMode: boolean; } +function toCompileSummaryRows(result: CompileResult): string[][] { + const counts = new Map(); + + for (const output of result.results) { + const current = counts.get(output.bridgeId) ?? { success: 0, failed: 0 }; + if (output.success) { + current.success += 1; + } else { + current.failed += 1; + } + counts.set(output.bridgeId, current); + } + + const rows: string[][] = []; + for (const [bridgeId, count] of counts.entries()) { + rows.push([ + bridgeId, + String(count.success), + String(count.failed), + ]); + } + + rows.sort((a, b) => a[0]!.localeCompare(b[0]!)); + return rows; +} + async function resolveCompileContext(cwd: string): Promise { const projectConfigPath = join(cwd, '.dwf', 'config.yml'); if (await fileExists(projectConfigPath)) { @@ -347,7 +374,7 @@ export async function runCompile(options: CompileOptions): Promise { const mergedRules = mergeRules(globalRules, projectRules); const overriddenRuleIds = getOverriddenRuleIds(globalRules, projectRules); - ui.keyValue('Project:', chalk.bold(config.project.name)); + ui.keyValue('Project:', pc.bold(config.project.name)); ui.keyValue('Scope:', context.globalMode ? 'global (~/.dwf)' : 'project (.dwf)'); ui.keyValue('Mode:', config.mode); ui.keyValue('Project rules:', String(projectRules.length)); @@ -361,7 +388,7 @@ export async function runCompile(options: CompileOptions): Promise { ui.keyValue('Project overrides:', String(overriddenRuleIds.length)); } const toolIds = options.tool ? 
[options.tool] : config.tools; - ui.keyValue('Tools:', chalk.cyan(toolIds.join(', '))); + ui.keyValue('Tools:', pc.cyan(toolIds.join(', '))); ui.newline(); } @@ -374,7 +401,7 @@ export async function runCompile(options: CompileOptions): Promise { for (const br of result.results) { if (br.content !== undefined) { - console.log(chalk.cyan(`--- ${br.outputPath} ---`)); + console.log(pc.cyan(`--- ${br.outputPath} ---`)); console.log(br.content); } } @@ -399,6 +426,12 @@ export async function runCompile(options: CompileOptions): Promise { ui.warn('Tool-specific outputs were still written'); } + const summaryTable = renderTable( + ['bridge', 'generated', 'failed'], + toCompileSummaryRows(result), + [10, 9, 6], + ); + // Show migration messages if any if (result.migration.actions.length > 0) { ui.newline(); @@ -414,6 +447,7 @@ export async function runCompile(options: CompileOptions): Promise { ui.newline(); ui.success(`Compiled ${String(result.activeRuleCount)} rules ${ICONS.arrow} ${String(allPaths.length)} file${allPaths.length !== 1 ? 
's' : ''} ${ui.timing(result.elapsedMs)}`); ui.info(`Canonical files: ${String(result.canonicalFileCount)}`); + ui.log(summaryTable); if (options.verbose && result.overriddenRuleIds.length > 0) { ui.info(`Project overrides (${String(result.overriddenRuleIds.length)}): ${result.overriddenRuleIds.join(', ')}`); } @@ -424,7 +458,7 @@ export async function runCompile(options: CompileOptions): Promise { if (result.staleResults.length > 0) { ui.newline(); - console.log(` ${chalk.dim('Stale files removed:')}`); + console.log(` ${pc.dim('Stale files removed:')}`); for (const stale of result.staleResults) { for (const deleted of stale.deleted) { ui.info(` ${stale.bridgeId}: ${deleted}`); @@ -434,7 +468,7 @@ export async function runCompile(options: CompileOptions): Promise { if (result.assetPaths.length > 0) { ui.newline(); - console.log(` ${chalk.dim('Assets deployed:')}`); + console.log(` ${pc.dim('Assets deployed:')}`); ui.list(result.assetPaths); } } else { diff --git a/packages/cli/src/commands/explain.ts b/packages/cli/src/commands/explain.ts index 6611d24..c9f102c 100644 --- a/packages/cli/src/commands/explain.ts +++ b/packages/cli/src/commands/explain.ts @@ -1,6 +1,6 @@ import { join } from 'node:path'; import type { Command } from 'commander'; -import chalk from 'chalk'; +import pc from 'picocolors'; import { readConfig, readRules } from '../core/parser.js'; import type { Bridge, Rule } from '../bridges/types.js'; import { isMarkerBridge, isDirectoryBridge, getBridgeOutputPaths } from '../bridges/types.js'; @@ -53,7 +53,7 @@ function formatSeparator(toolId: string): string { const prefix = `${ICONS.separator}${ICONS.separator}`; const remaining = lineWidth - prefix.length - label.length; const suffix = ICONS.separator.repeat(Math.max(0, remaining)); - return chalk.dim(`${prefix}${label}${suffix}`); + return pc.dim(`${prefix}${label}${suffix}`); } async function runExplain(options: ExplainOptions): Promise { diff --git a/packages/cli/src/commands/init.ts 
b/packages/cli/src/commands/init.ts index 113fb53..54701b7 100644 --- a/packages/cli/src/commands/init.ts +++ b/packages/cli/src/commands/init.ts @@ -3,12 +3,20 @@ import { join, basename } from 'node:path'; import { homedir } from 'node:os'; import type { Command } from 'commander'; import { stringify } from 'yaml'; -import chalk from 'chalk'; -import { checkbox, select } from '@inquirer/prompts'; +import pc from 'picocolors'; import { detectTools, SUPPORTED_TOOLS } from '../utils/detect-tools.js'; import * as ui from '../utils/ui.js'; import type { ToolId } from '../utils/detect-tools.js'; import { fileExists } from '../utils/fs.js'; +import { + selectPrompt, + multiselectPrompt, + introPrompt, + notePrompt, + outroPrompt, + spinnerTask, + isInteractiveSession, +} from '../utils/prompt.js'; export interface InitOptions { tools?: string; @@ -56,13 +64,14 @@ async function resolveTools(options: InitOptions, cwd: string): Promise({ + const selected = await multiselectPrompt({ message: 'Which tools to configure?', - choices: SUPPORTED_TOOLS.map((id) => ({ - name: id, + options: SUPPORTED_TOOLS.map((id) => ({ + label: id, value: id, - checked: detectedIds.includes(id), + hint: detectedIds.includes(id) ? 
'detected' : undefined, })), + initialValues: detectedIds, }); if (selected.length > 0) { @@ -85,11 +94,11 @@ async function resolveMode(options: InitOptions): Promise<'copy' | 'link'> { return 'copy'; } - const mode = await select<'copy' | 'link'>({ + const mode = await selectPrompt<'copy' | 'link'>({ message: 'Output mode', - choices: [ - { name: 'copy', value: 'copy' as const, description: 'Embed rules directly in tool config files' }, - { name: 'link', value: 'link' as const, description: 'Symlink tool config files to .dwf/ output' }, + options: [ + { label: 'copy', value: 'copy' as const, hint: 'Embed rules directly in tool config files' }, + { label: 'link', value: 'link' as const, hint: 'Symlink tool config files to .dwf/ output' }, ], }); @@ -121,17 +130,20 @@ async function resolveInitScope(options: InitOptions): Promise { return 'project'; } - return select({ + return selectPrompt({ message: 'Where do you want to set up devw?', - choices: [ - { name: 'This project (.dwf/)', value: 'project' as const }, - { name: 'Global (~/.dwf/)', value: 'global' as const }, + options: [ + { label: 'This project (.dwf/)', value: 'project' as const }, + { label: 'Global (~/.dwf/)', value: 'global' as const }, ], }); } export async function runInit(options: InitOptions): Promise { const cwd = process.cwd(); + if (isInteractiveSession() && !options.yes) { + introPrompt('Initialize dev-workflows'); + } let scope: InitScope; let tools: ToolId[]; @@ -162,10 +174,14 @@ export async function runInit(options: InitOptions): Promise { const projectName = scope === 'global' ? 
'global' : basename(cwd); - // Create .dwf/rules/ and .dwf/assets/ const rulesDir = join(dwfDir, 'rules'); - await mkdir(rulesDir, { recursive: true }); - await mkdir(join(dwfDir, 'assets'), { recursive: true }); + await spinnerTask({ + label: 'Creating workspace folders', + task: async () => { + await mkdir(rulesDir, { recursive: true }); + await mkdir(join(dwfDir, 'assets'), { recursive: true }); + }, + }); // Write config.yml const config = { @@ -177,18 +193,32 @@ export async function runInit(options: InitOptions): Promise { blocks: [] as string[], }; const configContent = `# Dev Workflows configuration\n${stringify(config)}`; - await writeFile(join(dwfDir, 'config.yml'), configContent, 'utf-8'); + await spinnerTask({ + label: 'Writing config.yml', + task: async () => { + await writeFile(join(dwfDir, 'config.yml'), configContent, 'utf-8'); + }, + }); // Write empty rule files - for (const scope of BUILTIN_SCOPES) { - await writeFile(join(rulesDir, `${scope}.yml`), buildRuleFileContent(scope), 'utf-8'); - } + await spinnerTask({ + label: 'Scaffolding rule files', + task: async () => { + for (const scope of BUILTIN_SCOPES) { + await writeFile(join(rulesDir, `${scope}.yml`), buildRuleFileContent(scope), 'utf-8'); + } + }, + }); // Ensure canonical global output dir exists for global mode. if (scope === 'global') { - await mkdir(join(rootDir, '.agents', 'rules', 'devw'), { recursive: true }); + await spinnerTask({ + label: 'Preparing canonical global output', + task: async () => { + await mkdir(join(rootDir, '.agents', 'rules', 'devw'), { recursive: true }); + }, + }); } else { - // Append .dwf/.cache/ to .gitignore for project mode. await appendToGitignore(cwd); } @@ -198,17 +228,23 @@ export async function runInit(options: InitOptions): Promise { ui.newline(); ui.success(`Initialized ${scope === 'global' ? 
'~/.dwf/' : '.dwf/'} successfully`); ui.newline(); - ui.keyValue('Project:', chalk.bold(projectName)); + ui.keyValue('Project:', pc.bold(projectName)); ui.keyValue('Scope:', scope); - ui.keyValue('Tools:', chalk.cyan(tools.join(', '))); + ui.keyValue('Tools:', pc.cyan(tools.join(', '))); ui.keyValue('Mode:', mode); ui.newline(); ui.header("What's next"); ui.newline(); - console.log(` 1. Browse available rules ${chalk.cyan('devw add --list')}`); - console.log(` 2. Add a rule ${chalk.cyan('devw add /')}`); - console.log(` 3. Or write your own rules in ${chalk.cyan(scope === 'global' ? '~/.dwf/rules/' : '.dwf/rules/')}`); - console.log(` 4. When ready, compile ${chalk.cyan('devw compile')}`); + console.log(` 1. Browse available rules ${pc.cyan('devw add --list')}`); + console.log(` 2. Add a rule ${pc.cyan('devw add /')}`); + console.log(` 3. Or write your own rules in ${pc.cyan(scope === 'global' ? '~/.dwf/rules/' : '.dwf/rules/')}`); + console.log(` 4. When ready, compile ${pc.cyan('devw compile')}`); + + notePrompt( + `Project: ${projectName}\nScope: ${scope}\nTools: ${tools.join(', ')}\nMode: ${mode}`, + 'Initialized', + ); + outroPrompt(`Ready: ${scope === 'global' ? '~/.dwf/' : '.dwf/'}`); if (options.preset) { ui.newline(); diff --git a/packages/cli/src/commands/list.ts b/packages/cli/src/commands/list.ts index b01d736..6d083f4 100644 --- a/packages/cli/src/commands/list.ts +++ b/packages/cli/src/commands/list.ts @@ -1,6 +1,6 @@ import { join } from 'node:path'; import type { Command } from 'commander'; -import chalk from 'chalk'; +import pc from 'picocolors'; import { readConfig, readRules } from '../core/parser.js'; import { fileExists } from '../utils/fs.js'; import { claudeBridge } from '../bridges/claude.js'; @@ -46,17 +46,17 @@ async function listRules(): Promise { ui.header(`Active rules (${String(active.length)})`); ui.newline(); for (const rule of active) { - const severityIcon = rule.severity === 'error' ? 
chalk.red(ICONS.error) : rule.severity === 'warning' ? chalk.yellow(ICONS.warn) : chalk.dim(ICONS.dot); - const severityColor = rule.severity === 'error' ? chalk.red : rule.severity === 'warning' ? chalk.yellow : chalk.dim; + const severityIcon = rule.severity === 'error' ? pc.red(ICONS.error) : rule.severity === 'warning' ? pc.yellow(ICONS.warn) : pc.dim(ICONS.dot); + const severityColor = rule.severity === 'error' ? pc.red : rule.severity === 'warning' ? pc.yellow : pc.dim; let source = ''; if (rule.source) { - source = chalk.dim(` (pulled: ${rule.source})`); + source = pc.dim(` (pulled: ${rule.source})`); } else if (rule.sourceBlock) { - source = chalk.dim(` [${rule.sourceBlock}]`); + source = pc.dim(` [${rule.sourceBlock}]`); } else { - source = chalk.dim(` ${ICONS.arrow} manual`); + source = pc.dim(` ${ICONS.arrow} manual`); } - console.log(` ${severityIcon} ${severityColor(rule.severity.padEnd(8))}${chalk.cyan(rule.scope.padEnd(15))}${rule.id}${source}`); + console.log(` ${severityIcon} ${severityColor(rule.severity.padEnd(8))}${pc.cyan(rule.scope.padEnd(15))}${rule.id}${source}`); } } @@ -98,9 +98,9 @@ async function listTools(): Promise { } } if (outputLabel) { - console.log(` ${chalk.dim(ICONS.bullet)} ${chalk.cyan(tool.padEnd(12))}${chalk.dim(ICONS.arrow)} ${chalk.dim(outputLabel)}`); + console.log(` ${pc.dim(ICONS.bullet)} ${pc.cyan(tool.padEnd(12))}${pc.dim(ICONS.arrow)} ${pc.dim(outputLabel)}`); } else { - console.log(` ${chalk.dim(ICONS.bullet)} ${chalk.cyan(tool)}`); + console.log(` ${pc.dim(ICONS.bullet)} ${pc.cyan(tool)}`); } } } @@ -140,7 +140,7 @@ async function listAssets(typeFilter?: string): Promise { ui.newline(); for (const asset of filtered) { const outputHint = getAssetOutputHint(asset.type, asset.name); - console.log(` ${chalk.dim(ICONS.bullet)} ${chalk.cyan(asset.type.padEnd(10))} ${chalk.white(asset.name.padEnd(20))} ${chalk.dim(`v${asset.version}`)} ${chalk.dim(ICONS.arrow)} ${chalk.dim(outputHint)}`); + console.log(` 
${pc.dim(ICONS.bullet)} ${pc.cyan(asset.type.padEnd(10))} ${pc.white(asset.name.padEnd(20))} ${pc.dim(`v${asset.version}`)} ${pc.dim(ICONS.arrow)} ${pc.dim(outputHint)}`); } } diff --git a/packages/cli/src/commands/menu.ts b/packages/cli/src/commands/menu.ts index 9fa6361..29868e0 100644 --- a/packages/cli/src/commands/menu.ts +++ b/packages/cli/src/commands/menu.ts @@ -1,19 +1,10 @@ -import { select } from '@inquirer/prompts'; -import chalk from 'chalk'; import type { Command } from 'commander'; import { runAdd } from './add.js'; import { runRemove } from './remove.js'; import { runDoctor } from './doctor.js'; import { runCompile } from './compile.js'; - -const menuTheme = { - style: { - keysHelpTip: (keys: [string, string][]): string => - [...keys, ['Ctrl+C', 'back']] - .map(([key, action]) => `${chalk.bold(key)} ${chalk.dim(action)}`) - .join(chalk.dim(' • ')), - }, -} as const; +import { renderBanner } from '../utils/banner.js'; +import { selectPrompt, introPrompt, outroPrompt, isInteractiveSession } from '../utils/prompt.js'; const MENU_CHOICES = { ADD: 'add', @@ -26,57 +17,48 @@ const MENU_CHOICES = { type MenuChoice = (typeof MENU_CHOICES)[keyof typeof MENU_CHOICES]; export async function runMainMenu(command: Command): Promise { - if (!process.stdout.isTTY || !process.stdin.isTTY) { + if (!isInteractiveSession()) { command.help(); return; } + const banner = renderBanner(); + if (banner.length > 0) { + console.log(banner); + } + introPrompt('Welcome to dev-workflows'); + while (true) { let choice: MenuChoice; - try { - choice = await select({ - message: 'What do you want to do?', - theme: menuTheme, - choices: [ - { name: 'Add rules or assets', value: MENU_CHOICES.ADD }, - { name: 'Compile for all editors', value: MENU_CHOICES.COMPILE }, - { name: 'Check project status', value: MENU_CHOICES.DOCTOR }, - { name: 'Remove something', value: MENU_CHOICES.REMOVE }, - { name: 'Exit', value: MENU_CHOICES.EXIT }, - ], - }); - } catch (err) { - if (err instanceof Error 
&& err.name === 'ExitPromptError') { - process.exit(0); - } - throw err; - } + choice = await selectPrompt({ + message: 'What do you want to do?', + options: [ + { label: 'Add rules or assets', value: MENU_CHOICES.ADD }, + { label: 'Compile for all editors', value: MENU_CHOICES.COMPILE }, + { label: 'Check project status', value: MENU_CHOICES.DOCTOR }, + { label: 'Remove something', value: MENU_CHOICES.REMOVE }, + { label: 'Exit', value: MENU_CHOICES.EXIT }, + ], + }); if (choice === MENU_CHOICES.EXIT) { + outroPrompt('See you next time.'); process.exit(0); } - try { - switch (choice) { - case MENU_CHOICES.ADD: - await runAdd(undefined, {}); - break; - case MENU_CHOICES.COMPILE: - await runCompile({ verbose: false, dryRun: false }); - break; - case MENU_CHOICES.DOCTOR: - await runDoctor(); - break; - case MENU_CHOICES.REMOVE: - await runRemove(undefined); - break; - } - } catch (err) { - if (err instanceof Error && err.name === 'ExitPromptError') { - // Ctrl+C inside a subcommand — return to main menu - } else { - throw err; - } + switch (choice) { + case MENU_CHOICES.ADD: + await runAdd(undefined, {}); + break; + case MENU_CHOICES.COMPILE: + await runCompile({ verbose: false, dryRun: false }); + break; + case MENU_CHOICES.DOCTOR: + await runDoctor(); + break; + case MENU_CHOICES.REMOVE: + await runRemove(undefined); + break; } } } diff --git a/packages/cli/src/commands/remove.ts b/packages/cli/src/commands/remove.ts index 00df3eb..04e2b52 100644 --- a/packages/cli/src/commands/remove.ts +++ b/packages/cli/src/commands/remove.ts @@ -2,11 +2,11 @@ import { join } from 'node:path'; import { readFile, writeFile, unlink } from 'node:fs/promises'; import type { Command } from 'commander'; import { parse, stringify } from 'yaml'; -import { checkbox, confirm, Separator } from '@inquirer/prompts'; import { readConfig } from '../core/parser.js'; import { fileExists } from '../utils/fs.js'; import { isAssetType, removeAsset } from '../core/assets.js'; import { validateInput 
} from './add.js'; +import { multiselectPrompt, confirmPrompt, introPrompt, outroPrompt, isInteractiveSession } from '../utils/prompt.js'; import * as ui from '../utils/ui.js'; import type { PulledEntry, AssetEntry } from '../bridges/types.js'; @@ -54,6 +54,10 @@ async function removeRule(cwd: string, path: string): Promise { export async function runRemove(ruleArg: string | undefined): Promise { const cwd = process.cwd(); + if (isInteractiveSession()) { + introPrompt('Remove rules or assets'); + } + if (!(await fileExists(join(cwd, '.dwf', 'config.yml')))) { ui.error('.dwf/config.yml not found', 'Run devw init to initialize the project'); process.exitCode = 1; @@ -63,6 +67,12 @@ export async function runRemove(ruleArg: string | undefined): Promise { const config = await readConfig(cwd); if (!ruleArg) { + if (!isInteractiveSession()) { + ui.error('No rule specified', 'Usage: devw remove /'); + process.exitCode = 1; + return; + } + const hasRules = config.pulled.length > 0; const hasAssets = config.assets.length > 0; @@ -73,33 +83,30 @@ export async function runRemove(ruleArg: string | undefined): Promise { type RemoveChoice = { kind: 'rule'; path: string } | { kind: 'asset'; type: string; name: string }; - const choices: (RemoveChoice | Separator)[] = []; + const choices: RemoveChoice[] = []; if (hasRules) { - choices.push(new Separator('── Rules ──')); for (const p of config.pulled) { - choices.push({ kind: 'rule', path: p.path } as RemoveChoice); + choices.push({ kind: 'rule', path: p.path }); } } if (hasAssets) { - choices.push(new Separator('── Assets ──')); for (const a of config.assets) { - choices.push({ kind: 'asset', type: a.type, name: a.name } as RemoveChoice); + choices.push({ kind: 'asset', type: a.type, name: a.name }); } } let selected: RemoveChoice[]; try { - selected = await checkbox({ + selected = await multiselectPrompt({ message: 'Select items to remove', - choices: choices.map((c) => { - if (c instanceof Separator) return c; + options: 
choices.map((c) => { if (c.kind === 'rule') { const entry = config.pulled.find((p) => p.path === c.path); - return { name: `${c.path} (v${entry?.version ?? '?'})`, value: c }; + return { label: `[rule] ${c.path} (v${entry?.version ?? '?'})`, value: c }; } - return { name: `${c.type}/${c.name}`, value: c }; + return { label: `[asset] ${c.type}/${c.name}`, value: c }; }), }); } catch { @@ -112,9 +119,9 @@ export async function runRemove(ruleArg: string | undefined): Promise { } try { - const shouldProceed = await confirm({ + const shouldProceed = await confirmPrompt({ message: `Remove ${String(selected.length)} item(s)?`, - default: true, + defaultValue: true, }); if (!shouldProceed) { ui.info('Remove cancelled'); @@ -137,6 +144,7 @@ export async function runRemove(ruleArg: string | undefined): Promise { const { runCompileFromAdd } = await import('./compile.js'); await runCompileFromAdd(); + outroPrompt('Remove command completed'); return; } @@ -185,6 +193,7 @@ export async function runRemove(ruleArg: string | undefined): Promise { const { runCompileFromAdd } = await import('./compile.js'); await runCompileFromAdd(); + outroPrompt('Remove command completed'); return; } @@ -206,6 +215,7 @@ export async function runRemove(ruleArg: string | undefined): Promise { const { runCompileFromAdd } = await import('./compile.js'); await runCompileFromAdd(); + outroPrompt('Remove command completed'); } export function registerRemoveCommand(program: Command): void { diff --git a/packages/cli/src/commands/watch.ts b/packages/cli/src/commands/watch.ts index e7a1ed0..9b24866 100644 --- a/packages/cli/src/commands/watch.ts +++ b/packages/cli/src/commands/watch.ts @@ -1,6 +1,6 @@ import { join } from 'node:path'; import type { Command } from 'commander'; -import chalk from 'chalk'; +import pc from 'picocolors'; import chokidar from 'chokidar'; import { executePipeline } from './compile.js'; import type { CompileResult } from './compile.js'; @@ -102,7 +102,7 @@ async function 
runWatch(options: WatchOptions): Promise { }); ui.newline(); - ui.header(chalk.green('Watching .dwf/ for changes...')); + ui.header(pc.green('Watching .dwf/ for changes...')); ui.info('Running initial compile...'); ui.newline(); diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index dda7ddd..b028e2b 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -9,6 +9,7 @@ import { registerListCommand } from './commands/list.js'; import { registerExplainCommand } from './commands/explain.js'; import { registerWatchCommand } from './commands/watch.js'; import { runMainMenu } from './commands/menu.js'; +import { renderBanner } from './utils/banner.js'; const require = createRequire(import.meta.url); const pkg = require('../package.json') as { version: string }; @@ -20,6 +21,15 @@ program .description('Compile developer rules into editor-specific config files') .version(pkg.version); +program.addHelpText('beforeAll', () => { + const banner = renderBanner(); + if (banner.length === 0) { + return ''; + } + + return `${banner}\n`; +}); + registerInitCommand(program); registerCompileCommand(program); registerDoctorCommand(program); diff --git a/packages/cli/src/utils/banner.ts b/packages/cli/src/utils/banner.ts new file mode 100644 index 0000000..850f9d5 --- /dev/null +++ b/packages/cli/src/utils/banner.ts @@ -0,0 +1,34 @@ +const BANNER_LINES = [ + ' ____ __ __ __ __', + '| _ \\ ___ _ _\\ \\ / /__ _ __| | __ / _| ___ __ _', + "| | | |/ _ \\ | | |\\ \\ / / _ \\| '__| |/ /| |_ / _ \\/ _` |", + '| |_| | __/ |_| | \\ \\/ / (_) | | | < | _| __/ (_| |', + '|____/ \\___|\\__,_| \\__/ \\___/|_| |_|\\_\\|_| \\___|\\__,_|', +] as const; + +const GRADIENT_START = 45; +const GRADIENT_END = 201; + +function colorizeLine(line: string, color: number): string { + return `\u001b[38;5;${String(color)}m${line}\u001b[0m`; +} + +function gradientColor(index: number, total: number): number { + if (total <= 1) { + return GRADIENT_START; + } + + const ratio = index 
/ (total - 1); + return Math.round(GRADIENT_START + (GRADIENT_END - GRADIENT_START) * ratio); +} + +export function renderBanner(): string { + if (!process.stdout.isTTY) { + return ''; + } + + return BANNER_LINES.map((line, index) => { + const color = gradientColor(index, BANNER_LINES.length); + return colorizeLine(line, color); + }).join('\n'); +} diff --git a/packages/cli/src/utils/prompt.ts b/packages/cli/src/utils/prompt.ts new file mode 100644 index 0000000..2e34b45 --- /dev/null +++ b/packages/cli/src/utils/prompt.ts @@ -0,0 +1,178 @@ +import * as p from '@clack/prompts'; + +export interface PromptOption { + value: T; + label: string; + hint?: string; +} + +interface SelectPromptOptions { + message: string; + options: ReadonlyArray>; + initialValue?: T; +} + +interface MultiselectPromptOptions { + message: string; + options: ReadonlyArray>; + required?: boolean; + initialValues?: ReadonlyArray; +} + +interface ConfirmPromptOptions { + message: string; + defaultValue?: boolean; +} + +interface TextPromptOptions { + message: string; + placeholder?: string; + defaultValue?: string; +} + +interface SpinnerTask { + label: string; + task: () => Promise; + successMessage?: string; + errorMessage?: string; +} + +function toClackOption(option: PromptOption): p.Option { + const mapped = { + value: option.value, + label: option.label, + } as { value: T; label: string; hint?: string }; + + if (option.hint !== undefined) { + mapped.hint = option.hint; + } + + return mapped as p.Option; +} + +function ensureInteractive(): void { + if (!isInteractiveSession()) { + throw new Error( + 'Interactive prompts are unavailable in non-interactive mode (TTY/CI). 
Use CLI flags to run non-interactively.', + ); + } +} + +function isCiEnvironment(): boolean { + const rawCi = process.env['CI']; + if (typeof rawCi !== 'string') { + return false; + } + + const normalized = rawCi.trim().toLowerCase(); + if (normalized.length === 0) { + return false; + } + + return normalized !== '0' && normalized !== 'false'; +} + +export function isInteractiveSession(): boolean { + return Boolean(process.stdout.isTTY && process.stdin.isTTY) && !isCiEnvironment(); +} + +function handleCancel(value: T | symbol): T { + if (p.isCancel(value)) { + p.cancel('Cancelled'); + process.exit(0); + } + + return value as T; +} + +export async function selectPrompt(options: SelectPromptOptions): Promise { + ensureInteractive(); + + const value = await p.select({ + message: options.message, + initialValue: options.initialValue, + options: options.options.map((option) => toClackOption(option)), + }); + + return handleCancel(value); +} + +export async function multiselectPrompt( + options: MultiselectPromptOptions, +): Promise { + ensureInteractive(); + + const value = await p.multiselect({ + message: options.message, + required: options.required, + initialValues: options.initialValues ? 
[...options.initialValues] : undefined, + options: options.options.map((option) => toClackOption(option)), + }); + + return handleCancel(value); +} + +export async function confirmPrompt(options: ConfirmPromptOptions): Promise { + ensureInteractive(); + + const value = await p.confirm({ + message: options.message, + initialValue: options.defaultValue, + }); + + return handleCancel(value); +} + +export async function textPrompt(options: TextPromptOptions): Promise { + ensureInteractive(); + + const value = await p.text({ + message: options.message, + placeholder: options.placeholder, + defaultValue: options.defaultValue, + }); + + return handleCancel(value); +} + +export function introPrompt(message: string): void { + if (!isInteractiveSession()) { + return; + } + + p.intro(message); +} + +export function outroPrompt(message: string): void { + if (!isInteractiveSession()) { + return; + } + + p.outro(message); +} + +export function notePrompt(message: string, title?: string): void { + if (!isInteractiveSession()) { + return; + } + + p.note(message, title); +} + +export async function spinnerTask(options: SpinnerTask): Promise { + if (!isInteractiveSession()) { + return options.task(); + } + + const spinner = p.spinner(); + spinner.start(options.label); + + try { + const result = await options.task(); + spinner.stop(options.successMessage ?? options.label); + return result; + } catch (error) { + spinner.stop(options.errorMessage ?? 
options.label); + throw error; + } +} diff --git a/packages/cli/src/utils/table.ts b/packages/cli/src/utils/table.ts new file mode 100644 index 0000000..33cf9cd --- /dev/null +++ b/packages/cli/src/utils/table.ts @@ -0,0 +1,59 @@ +import pc from 'picocolors'; + +const INDENT = ' '; + +function padCell(value: string, width: number): string { + return value.padEnd(width, ' '); +} + +function line(left: string, middle: string, right: string, widths: number[]): string { + const segments = widths.map((width) => '─'.repeat(width + 2)); + return `${INDENT}${left}${segments.join(middle)}${right}`; +} + +function inferWidths(headers: string[], rows: string[][], columnWidths?: number[]): number[] { + return headers.map((header, index) => { + const headerWidth = header.length; + const rowWidth = rows.reduce((maxWidth, row) => { + const value = row[index] ?? ''; + return Math.max(maxWidth, value.length); + }, 0); + const minWidth = columnWidths?.[index] ?? 0; + return Math.max(headerWidth, rowWidth, minWidth); + }); +} + +function renderRow(cells: string[], widths: number[], bold = false): string { + const rendered = widths.map((width, index) => { + const rawCell = cells[index] ?? ''; + const paddedCell = ` ${padCell(rawCell, width)} `; + return bold ? 
pc.bold(paddedCell) : paddedCell; + }); + + return `${INDENT}│${rendered.join('│')}│`; +} + +export function renderTable( + headers: string[], + rows: string[][], + columnWidths?: number[], +): string { + if (headers.length === 0) { + return ''; + } + + const widths = inferWidths(headers, rows, columnWidths); + const output: string[] = []; + + output.push(line('┌', '┬', '┐', widths)); + output.push(renderRow(headers, widths, true)); + output.push(line('├', '┼', '┤', widths)); + + for (const row of rows) { + output.push(renderRow(row, widths)); + } + + output.push(line('└', '┴', '┘', widths)); + + return output.join('\n'); +} diff --git a/packages/cli/src/utils/ui.ts b/packages/cli/src/utils/ui.ts index 815c4ff..3c4a9da 100644 --- a/packages/cli/src/utils/ui.ts +++ b/packages/cli/src/utils/ui.ts @@ -1,4 +1,4 @@ -import chalk from 'chalk'; +import pc from 'picocolors'; export const ICONS = { success: '\u2714', @@ -19,22 +19,22 @@ const INDENT = { } as const; export function success(msg: string): void { - console.log(`${INDENT.section}${chalk.green(ICONS.success)} ${msg}`); + console.log(`${INDENT.section}${pc.green(ICONS.success)} ${msg}`); } export function error(msg: string, hint?: string): void { - console.error(`${INDENT.section}${chalk.red(ICONS.error)} ${chalk.red(msg)}`); + console.error(`${INDENT.section}${pc.red(ICONS.error)} ${pc.red(msg)}`); if (hint) { - console.error(`${INDENT.detail}${chalk.dim(hint)}`); + console.error(`${INDENT.detail}${pc.dim(hint)}`); } } export function warn(msg: string): void { - console.log(`${INDENT.section}${chalk.yellow(ICONS.warn)} ${chalk.yellow(msg)}`); + console.log(`${INDENT.section}${pc.yellow(ICONS.warn)} ${pc.yellow(msg)}`); } export function info(msg: string): void { - console.log(`${INDENT.section}${chalk.dim(msg)}`); + console.log(`${INDENT.section}${pc.dim(msg)}`); } export function log(msg: string): void { @@ -42,16 +42,16 @@ export function log(msg: string): void { } export function header(title: string): void { 
- console.log(`${INDENT.section}${chalk.bold(title)}`); + console.log(`${INDENT.section}${pc.bold(title)}`); } export function keyValue(label: string, value: string): void { const padded = label.padEnd(10); - console.log(`${INDENT.detail}${chalk.dim(padded)}${value}`); + console.log(`${INDENT.detail}${pc.dim(padded)}${value}`); } export function divider(): void { - console.log(`${INDENT.section}${chalk.dim(`${ICONS.separator}${ICONS.separator}`)}`); + console.log(`${INDENT.section}${pc.dim(`${ICONS.separator}${ICONS.separator}`)}`); } export function newline(): void { @@ -65,36 +65,36 @@ export function summary(counts: { passed?: number; failed?: number; skipped?: nu const skipped = counts.skipped ?? 0; if (passed > 0 || (failed === 0 && skipped === 0)) { - parts.push(chalk.green(`${String(passed)} passed`)); + parts.push(pc.green(`${String(passed)} passed`)); } if (failed > 0 || (passed === 0 && skipped === 0)) { - parts.push(chalk.red(`${String(failed)} failed`)); + parts.push(pc.red(`${String(failed)} failed`)); } if (skipped > 0) { - parts.push(chalk.dim(`${String(skipped)} skipped`)); + parts.push(pc.dim(`${String(skipped)} skipped`)); } - console.log(`${INDENT.section}${parts.join(chalk.dim(` ${ICONS.dot} `))}`); + console.log(`${INDENT.section}${parts.join(pc.dim(` ${ICONS.dot} `))}`); } export function timing(ms: number): string { - return chalk.dim(`(${String(Math.round(ms))}ms)`); + return pc.dim(`(${String(Math.round(ms))}ms)`); } export function list(items: string[]): void { for (const item of items) { - console.log(`${INDENT.detail}${chalk.dim(ICONS.bullet)} ${item}`); + console.log(`${INDENT.detail}${pc.dim(ICONS.bullet)} ${item}`); } } export function check(passed: boolean, msg: string, skipped?: boolean): void { if (skipped) { - console.log(`${INDENT.section}${chalk.dim(ICONS.skip)} ${chalk.dim(msg)}`); + console.log(`${INDENT.section}${pc.dim(ICONS.skip)} ${pc.dim(msg)}`); return; } if (passed) { - 
console.log(`${INDENT.section}${chalk.green(ICONS.success)} ${msg}`); + console.log(`${INDENT.section}${pc.green(ICONS.success)} ${msg}`); } else { - console.log(`${INDENT.section}${chalk.red(ICONS.error)} ${chalk.red(msg)}`); + console.log(`${INDENT.section}${pc.red(ICONS.error)} ${pc.red(msg)}`); } } diff --git a/packages/cli/tests/commands/menu.test.ts b/packages/cli/tests/commands/menu.test.ts index c37d89e..c1e9d48 100644 --- a/packages/cli/tests/commands/menu.test.ts +++ b/packages/cli/tests/commands/menu.test.ts @@ -15,10 +15,12 @@ function makeMockCommand(): { helpCalled: boolean; help: () => void } { describe('runMainMenu — TTY guard', () => { let originalStdoutIsTTY: boolean | undefined; let originalStdinIsTTY: boolean | undefined; + let originalCI: string | undefined; beforeEach(() => { originalStdoutIsTTY = process.stdout.isTTY; originalStdinIsTTY = process.stdin.isTTY; + originalCI = process.env['CI']; }); afterEach(() => { @@ -33,6 +35,12 @@ describe('runMainMenu — TTY guard', () => { writable: true, configurable: true, }); + + if (originalCI === undefined) { + delete process.env['CI']; + } else { + process.env['CI'] = originalCI; + } }); it('calls command.help() when stdout is not a TTY', async () => { @@ -71,4 +79,23 @@ describe('runMainMenu — TTY guard', () => { assert.equal(mockCommand.helpCalled, true); }); + + it('calls command.help() when CI mode is enabled', async () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + process.env['CI'] = 'true'; + + const mockCommand = makeMockCommand(); + await runMainMenu(mockCommand as unknown as import('commander').Command); + + assert.equal(mockCommand.helpCalled, true); + }); }); diff --git a/packages/cli/tests/ui/output.test.ts b/packages/cli/tests/ui/output.test.ts index 904b903..470c1b6 100644 --- 
a/packages/cli/tests/ui/output.test.ts +++ b/packages/cli/tests/ui/output.test.ts @@ -19,11 +19,11 @@ interface RunResult { exitCode: number; } -async function run(args: string[], cwd: string): Promise { +async function run(args: string[], cwd: string, extraEnv?: Record): Promise { try { const { stdout, stderr } = await execFile(NODE, [DEVW, ...args], { cwd, - env: { ...process.env, NO_COLOR: '1', FORCE_COLOR: '0' }, + env: { ...process.env, NO_COLOR: '1', FORCE_COLOR: '0', ...extraEnv }, }); return { stdout, stderr, exitCode: 0 }; } catch (err: unknown) { @@ -92,6 +92,8 @@ describe('output format: compile', () => { assert.ok(result.stdout.includes('\u2192'), 'should have arrow'); assert.ok(result.stdout.includes('file'), 'should mention files'); assert.ok(/\(\d+ms\)/.test(result.stdout), 'should have timing'); + assert.ok(result.stdout.includes('bridge'), 'should include summary table headers'); + assert.ok(result.stdout.includes('generated'), 'should include generated column'); }); it('shows file list with bullet prefix', async () => { @@ -106,6 +108,26 @@ describe('output format: compile', () => { }); }); +describe('output format: help', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-output-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('does not print the banner in non-TTY output', async () => { + const result = await run(['--help'], tmpDir); + + assert.equal(result.exitCode, 0); + assert.ok(result.stdout.includes('Usage: devw'), 'should print standard help text'); + assert.equal(result.stdout.includes('____ __ __'), false); + }); +}); + describe('output format: doctor', () => { let tmpDir: string; @@ -284,4 +306,14 @@ describe('output format: error messages', () => { assert.equal(result.exitCode, 1); assert.ok(result.stderr.includes('\u2717'), 'should have error icon'); }); + + it('remove without args in non-interactive mode shows usage hint', 
async () => { + await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), CONFIG_TEMPLATE(['claude'])); + + const result = await run(['remove'], tmpDir, { CI: 'true' }); + + assert.equal(result.exitCode, 1); + assert.ok(result.stderr.includes('Usage: devw remove /')); + }); }); diff --git a/packages/cli/tests/utils/banner.test.ts b/packages/cli/tests/utils/banner.test.ts new file mode 100644 index 0000000..32d523c --- /dev/null +++ b/packages/cli/tests/utils/banner.test.ts @@ -0,0 +1,47 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { renderBanner } from '../../src/utils/banner.js'; + +describe('renderBanner', () => { + let originalStdoutIsTTY: boolean | undefined; + + beforeEach(() => { + originalStdoutIsTTY = process.stdout.isTTY; + }); + + afterEach(() => { + Object.defineProperty(process.stdout, 'isTTY', { + value: originalStdoutIsTTY, + writable: true, + configurable: true, + }); + }); + + it('returns empty string when stdout is not a TTY', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + + assert.equal(renderBanner(), ''); + }); + + it('returns deterministic ANSI banner for TTY output', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + + const expected = [ + '\u001b[38;5;45m ____ __ __ __ __\u001b[0m', + '\u001b[38;5;84m| _ \\ ___ _ _\\ \\ / /__ _ __| | __ / _| ___ __ _\u001b[0m', + "\u001b[38;5;123m| | | |/ _ \\ | | |\\ \\ / / _ \\| '__| |/ /| |_ / _ \\/ _` |\u001b[0m", + '\u001b[38;5;162m| |_| | __/ |_| | \\ \\/ / (_) | | | < | _| __/ (_| |\u001b[0m', + '\u001b[38;5;201m|____/ \\___|\\__,_| \\__/ \\___/|_| |_|\\_\\|_| \\___|\\__,_|\u001b[0m', + ].join('\n'); + + assert.equal(renderBanner(), expected); + }); +}); diff --git a/packages/cli/tests/utils/legacy-imports.test.ts 
b/packages/cli/tests/utils/legacy-imports.test.ts new file mode 100644 index 0000000..6b175f2 --- /dev/null +++ b/packages/cli/tests/utils/legacy-imports.test.ts @@ -0,0 +1,45 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { readdir, readFile } from 'node:fs/promises'; +import { join } from 'node:path'; + +const SRC_ROOT = join(process.cwd(), 'src'); + +async function collectTypeScriptFiles(root: string): Promise { + const entries = await readdir(root, { withFileTypes: true }); + const files: string[] = []; + + for (const entry of entries) { + const fullPath = join(root, entry.name); + if (entry.isDirectory()) { + const nested = await collectTypeScriptFiles(fullPath); + files.push(...nested); + continue; + } + + if (entry.isFile() && fullPath.endsWith('.ts')) { + files.push(fullPath); + } + } + + return files; +} + +describe('legacy imports are fully removed from src', () => { + it('does not use chalk or @inquirer/prompts', async () => { + const tsFiles = await collectTypeScriptFiles(SRC_ROOT); + const offenders: string[] = []; + + for (const filePath of tsFiles) { + const content = await readFile(filePath, 'utf-8'); + if (content.includes("'chalk'") || content.includes('"chalk"')) { + offenders.push(`${filePath}: chalk`); + } + if (content.includes('@inquirer/prompts')) { + offenders.push(`${filePath}: @inquirer/prompts`); + } + } + + assert.deepEqual(offenders, []); + }); +}); diff --git a/packages/cli/tests/utils/prompt.test.ts b/packages/cli/tests/utils/prompt.test.ts new file mode 100644 index 0000000..4f5c3e7 --- /dev/null +++ b/packages/cli/tests/utils/prompt.test.ts @@ -0,0 +1,150 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { + isInteractiveSession, + selectPrompt, + spinnerTask, + introPrompt, + outroPrompt, + notePrompt, +} from '../../src/utils/prompt.js'; + +describe('prompt utils', () => { + let originalStdoutIsTTY: boolean | 
undefined; + let originalStdinIsTTY: boolean | undefined; + let originalCI: string | undefined; + + beforeEach(() => { + originalStdoutIsTTY = process.stdout.isTTY; + originalStdinIsTTY = process.stdin.isTTY; + originalCI = process.env['CI']; + }); + + afterEach(() => { + Object.defineProperty(process.stdout, 'isTTY', { + value: originalStdoutIsTTY, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: originalStdinIsTTY, + writable: true, + configurable: true, + }); + + if (originalCI === undefined) { + delete process.env['CI']; + } else { + process.env['CI'] = originalCI; + } + }); + + it('returns false when stdout is not a TTY', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + delete process.env['CI']; + + assert.equal(isInteractiveSession(), false); + }); + + it('returns false when CI is enabled even if both streams are TTY', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + process.env['CI'] = 'true'; + + assert.equal(isInteractiveSession(), false); + }); + + it('returns true for interactive non-CI session', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + process.env['CI'] = 'false'; + + assert.equal(isInteractiveSession(), true); + }); + + it('throws a helpful message for prompts in non-interactive mode', async () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + 
Object.defineProperty(process.stdin, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + + await assert.rejects( + async () => selectPrompt({ message: 'Pick one', options: [{ label: 'one', value: 'one' }] }), + (error: unknown) => { + assert.ok(error instanceof Error); + assert.match(error.message, /non-interactive mode/); + return true; + }, + ); + }); + + it('runs spinner tasks without clack spinner in non-interactive mode', async () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + + const result = await spinnerTask({ + label: 'work', + task: async () => 'ok', + }); + + assert.equal(result, 'ok'); + }); + + it('intro/outro/note are no-ops in non-interactive mode', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + + assert.doesNotThrow(() => { + introPrompt('hello'); + notePrompt('body', 'title'); + outroPrompt('bye'); + }); + }); +}); diff --git a/packages/cli/tests/utils/table.test.ts b/packages/cli/tests/utils/table.test.ts new file mode 100644 index 0000000..1334fad --- /dev/null +++ b/packages/cli/tests/utils/table.test.ts @@ -0,0 +1,36 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { renderTable } from '../../src/utils/table.js'; + +describe('renderTable', () => { + it('renders deterministic borders and rows', () => { + const output = renderTable( + ['bridge', 'generated', 'failed'], + [ + ['claude', '2', '0'], + ['gemini', '1', '1'], + ], + ); + + const expected = [ + ' ┌────────┬───────────┬────────┐', + ' │ bridge │ generated │ failed │', + ' ├────────┼───────────┼────────┤', + ' │ claude │ 2 │ 0 │', + ' │ gemini │ 1 │ 1 
│', + ' └────────┴───────────┴────────┘', + ].join('\n'); + + assert.equal(output, expected); + }); + + it('returns empty string when headers are empty', () => { + assert.equal(renderTable([], [['value']]), ''); + }); + + it('honors minimum column widths', () => { + const output = renderTable(['id'], [['x']], [5]); + assert.ok(output.includes('│ id │')); + assert.ok(output.includes('│ x │')); + }); +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 2ee518a..bb6d862 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -14,18 +14,18 @@ importers: packages/cli: dependencies: - '@inquirer/prompts': - specifier: ^7.0.0 - version: 7.10.1(@types/node@22.19.9) - chalk: - specifier: ^5.4.0 - version: 5.6.2 + '@clack/prompts': + specifier: ^0.9.0 + version: 0.9.1 chokidar: specifier: ^3.6.0 version: 3.6.0 commander: specifier: ^13.0.0 version: 13.1.0 + picocolors: + specifier: ^1.1.0 + version: 1.1.1 yaml: specifier: ^2.7.0 version: 2.8.2 @@ -98,54 +98,11 @@ packages: '@changesets/write@0.4.0': resolution: {integrity: sha512-CdTLvIOPiCNuH71pyDu3rA+Q0n65cmAbXnwWH84rKGiFumFzkmHNT8KHTMEchcxN+Kl8I54xGUhJ7l3E7X396Q==} - '@inquirer/ansi@1.0.2': - resolution: {integrity: sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==} - engines: {node: '>=18'} - - '@inquirer/checkbox@4.3.2': - resolution: {integrity: sha512-VXukHf0RR1doGe6Sm4F0Em7SWYLTHSsbGfJdS9Ja2bX5/D5uwVOEjr07cncLROdBvmnvCATYEWlHqYmXv2IlQA==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/confirm@5.1.21': - resolution: {integrity: sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/core@10.3.2': - resolution: {integrity: 
sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/editor@4.2.23': - resolution: {integrity: sha512-aLSROkEwirotxZ1pBaP8tugXRFCxW94gwrQLxXfrZsKkfjOYC1aRvAZuhpJOb5cu4IBTJdsCigUlf2iCOu4ZDQ==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true + '@clack/core@0.4.1': + resolution: {integrity: sha512-Pxhij4UXg8KSr7rPek6Zowm+5M22rbd2g1nfojHJkxp5YkFqiZ2+YLEM/XGVIzvGOcM0nqjIFxrpDwWRZYWYjA==} - '@inquirer/expand@4.0.23': - resolution: {integrity: sha512-nRzdOyFYnpeYTTR2qFwEVmIWypzdAx/sIkCMeTNTcflFOovfqUk+HcFhQQVBftAh9gmGrpFj6QcGEqrDMDOiew==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true + '@clack/prompts@0.9.1': + resolution: {integrity: sha512-JIpyaboYZeWYlyP0H+OoPPxd6nqueG/CmN6ixBiNFsIDHREevjIf0n0Ohh5gr5C8pEDknzgvz+pIJ8dMhzWIeg==} '@inquirer/external-editor@1.0.3': resolution: {integrity: sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==} @@ -156,82 +113,6 @@ packages: '@types/node': optional: true - '@inquirer/figures@1.0.15': - resolution: {integrity: sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==} - engines: {node: '>=18'} - - '@inquirer/input@4.3.1': - resolution: {integrity: sha512-kN0pAM4yPrLjJ1XJBjDxyfDduXOuQHrBB8aLDMueuwUGn+vNpF7Gq7TvyVxx8u4SHlFFj4trmj+a2cbpG4Jn1g==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/number@3.0.23': - resolution: {integrity: sha512-5Smv0OK7K0KUzUfYUXDXQc9jrf8OHo4ktlEayFlelCjwMXz0299Y8OrI+lj7i4gCBY15UObk76q0QtxjzFcFcg==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - 
peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/password@4.0.23': - resolution: {integrity: sha512-zREJHjhT5vJBMZX/IUbyI9zVtVfOLiTO66MrF/3GFZYZ7T4YILW5MSkEYHceSii/KtRk+4i3RE7E1CUXA2jHcA==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/prompts@7.10.1': - resolution: {integrity: sha512-Dx/y9bCQcXLI5ooQ5KyvA4FTgeo2jYj/7plWfV5Ak5wDPKQZgudKez2ixyfz7tKXzcJciTxqLeK7R9HItwiByg==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/rawlist@4.1.11': - resolution: {integrity: sha512-+LLQB8XGr3I5LZN/GuAHo+GpDJegQwuPARLChlMICNdwW7OwV2izlCSCxN6cqpL0sMXmbKbFcItJgdQq5EBXTw==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/search@3.2.2': - resolution: {integrity: sha512-p2bvRfENXCZdWF/U2BXvnSI9h+tuA8iNqtUKb9UWbmLYCRQxd8WkvwWvYn+3NgYaNwdUkHytJMGG4MMLucI1kA==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/select@4.4.2': - resolution: {integrity: sha512-l4xMuJo55MAe+N7Qr4rX90vypFwCajSakx59qe/tMaC1aEHWLyw68wF4o0A4SLAY4E0nd+Vt+EyskeDIqu1M6w==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/type@3.0.10': - resolution: {integrity: sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - '@manypkg/find-root@1.1.0': resolution: {integrity: sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA==} @@ -264,10 +145,6 @@ packages: resolution: {integrity: 
sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} engines: {node: '>=8'} - ansi-styles@4.3.0: - resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} - engines: {node: '>=8'} - anymatch@3.1.3: resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} engines: {node: '>= 8'} @@ -294,10 +171,6 @@ packages: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} - chalk@5.6.2: - resolution: {integrity: sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==} - engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - chardet@2.1.1: resolution: {integrity: sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==} @@ -309,17 +182,6 @@ packages: resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} engines: {node: '>=8'} - cli-width@4.1.0: - resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} - engines: {node: '>= 12'} - - color-convert@2.0.1: - resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} - engines: {node: '>=7.0.0'} - - color-name@1.1.4: - resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - commander@13.1.0: resolution: {integrity: sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==} engines: {node: '>=18'} @@ -336,9 +198,6 @@ packages: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} - emoji-regex@8.0.0: - resolution: 
{integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - enquirer@2.4.1: resolution: {integrity: sha512-rRqJg/6gd538VHvR3PSrdRBb/1Vy2YfzHqzvbhGIQpDRKIa4FgV/54b5Q1xYSxOOwKvjXweS26E0Q+nAMwp2pQ==} engines: {node: '>=8.6'} @@ -410,10 +269,6 @@ packages: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} - is-fullwidth-code-point@3.0.0: - resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} - engines: {node: '>=8'} - is-glob@4.0.3: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} @@ -463,10 +318,6 @@ packages: resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} engines: {node: '>=4'} - mute-stream@2.0.0: - resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==} - engines: {node: ^18.17.0 || >=20.5.0} - normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} @@ -570,6 +421,9 @@ packages: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} + sisteransi@1.0.5: + resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -580,10 +434,6 @@ packages: sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - 
string-width@4.2.3: - resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} - engines: {node: '>=8'} - strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} @@ -617,19 +467,11 @@ packages: engines: {node: '>= 8'} hasBin: true - wrap-ansi@6.2.0: - resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} - engines: {node: '>=8'} - yaml@2.8.2: resolution: {integrity: sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==} engines: {node: '>= 14.6'} hasBin: true - yoctocolors-cjs@2.1.3: - resolution: {integrity: sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==} - engines: {node: '>=18'} - snapshots: '@babel/runtime@7.28.6': {} @@ -778,53 +620,16 @@ snapshots: human-id: 4.1.3 prettier: 2.8.8 - '@inquirer/ansi@1.0.2': {} - - '@inquirer/checkbox@4.3.2(@types/node@22.19.9)': + '@clack/core@0.4.1': dependencies: - '@inquirer/ansi': 1.0.2 - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/figures': 1.0.15 - '@inquirer/type': 3.0.10(@types/node@22.19.9) - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/confirm@5.1.21(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/core@10.3.2(@types/node@22.19.9)': - dependencies: - '@inquirer/ansi': 1.0.2 - '@inquirer/figures': 1.0.15 - '@inquirer/type': 3.0.10(@types/node@22.19.9) - cli-width: 4.1.0 - mute-stream: 2.0.0 - signal-exit: 4.1.0 - wrap-ansi: 6.2.0 - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/editor@4.2.23(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 
10.3.2(@types/node@22.19.9) - '@inquirer/external-editor': 1.0.3(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 + picocolors: 1.1.1 + sisteransi: 1.0.5 - '@inquirer/expand@4.0.23(@types/node@22.19.9)': + '@clack/prompts@0.9.1': dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 + '@clack/core': 0.4.1 + picocolors: 1.1.1 + sisteransi: 1.0.5 '@inquirer/external-editor@1.0.3(@types/node@22.19.9)': dependencies: @@ -833,76 +638,6 @@ snapshots: optionalDependencies: '@types/node': 22.19.9 - '@inquirer/figures@1.0.15': {} - - '@inquirer/input@4.3.1(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/number@3.0.23(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/password@4.0.23(@types/node@22.19.9)': - dependencies: - '@inquirer/ansi': 1.0.2 - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/prompts@7.10.1(@types/node@22.19.9)': - dependencies: - '@inquirer/checkbox': 4.3.2(@types/node@22.19.9) - '@inquirer/confirm': 5.1.21(@types/node@22.19.9) - '@inquirer/editor': 4.2.23(@types/node@22.19.9) - '@inquirer/expand': 4.0.23(@types/node@22.19.9) - '@inquirer/input': 4.3.1(@types/node@22.19.9) - '@inquirer/number': 3.0.23(@types/node@22.19.9) - '@inquirer/password': 4.0.23(@types/node@22.19.9) - '@inquirer/rawlist': 4.1.11(@types/node@22.19.9) - '@inquirer/search': 3.2.2(@types/node@22.19.9) - '@inquirer/select': 4.4.2(@types/node@22.19.9) - optionalDependencies: - 
'@types/node': 22.19.9 - - '@inquirer/rawlist@4.1.11(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/search@3.2.2(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/figures': 1.0.15 - '@inquirer/type': 3.0.10(@types/node@22.19.9) - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/select@4.4.2(@types/node@22.19.9)': - dependencies: - '@inquirer/ansi': 1.0.2 - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/figures': 1.0.15 - '@inquirer/type': 3.0.10(@types/node@22.19.9) - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/type@3.0.10(@types/node@22.19.9)': - optionalDependencies: - '@types/node': 22.19.9 - '@manypkg/find-root@1.1.0': dependencies: '@babel/runtime': 7.28.6 @@ -941,10 +676,6 @@ snapshots: ansi-regex@5.0.1: {} - ansi-styles@4.3.0: - dependencies: - color-convert: 2.0.1 - anymatch@3.1.3: dependencies: normalize-path: 3.0.0 @@ -968,8 +699,6 @@ snapshots: dependencies: fill-range: 7.1.1 - chalk@5.6.2: {} - chardet@2.1.1: {} chokidar@3.6.0: @@ -986,14 +715,6 @@ snapshots: ci-info@3.9.0: {} - cli-width@4.1.0: {} - - color-convert@2.0.1: - dependencies: - color-name: 1.1.4 - - color-name@1.1.4: {} - commander@13.1.0: {} cross-spawn@7.0.6: @@ -1008,8 +729,6 @@ snapshots: dependencies: path-type: 4.0.0 - emoji-regex@8.0.0: {} - enquirer@2.4.1: dependencies: ansi-colors: 4.1.3 @@ -1084,8 +803,6 @@ snapshots: is-extglob@2.1.1: {} - is-fullwidth-code-point@3.0.0: {} - is-glob@4.0.3: dependencies: is-extglob: 2.1.1 @@ -1128,8 +845,6 @@ snapshots: mri@1.2.0: {} - mute-stream@2.0.0: {} - normalize-path@3.0.0: {} outdent@0.5.0: {} @@ -1203,6 +918,8 @@ snapshots: signal-exit@4.1.0: {} + sisteransi@1.0.5: {} + slash@3.0.0: {} spawndamnit@3.0.1: @@ -1212,12 +929,6 
@@ snapshots: sprintf-js@1.0.3: {} - string-width@4.2.3: - dependencies: - emoji-regex: 8.0.0 - is-fullwidth-code-point: 3.0.0 - strip-ansi: 6.0.1 - strip-ansi@6.0.1: dependencies: ansi-regex: 5.0.1 @@ -1240,12 +951,4 @@ snapshots: dependencies: isexe: 2.0.0 - wrap-ansi@6.2.0: - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - yaml@2.8.2: {} - - yoctocolors-cjs@2.1.3: {} From 4dbcdb6cd698039d36d763736c436cf32d170947 Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 18:33:28 +0200 Subject: [PATCH 17/18] feat(registry): add registry manifest, ETag caching, search, and version detection - Add registry.json manifest generation script and GitHub Action - Replace 1h TTL cache with ETag-based conditional requests - Add searchRegistry() and filterRegistryByTag() utilities - Add --search and --tag flags to devw add --list - Add version detection with update prompt on devw add - Update banner with user-customized ASCII art - Fix remove E2E test for non-TTY guard behavior --- .github/workflows/generate-registry.yml | 41 +++ content/registry.json | 187 ++++++++++++ packages/cli/src/commands/add.ts | 297 ++++++++++++++++---- packages/cli/src/utils/banner.ts | 11 +- packages/cli/src/utils/cache.ts | 94 ++++++- packages/cli/src/utils/github.ts | 110 ++++++++ packages/cli/src/utils/registry.ts | 59 ++++ packages/cli/tests/e2e/cli.test.ts | 8 +- packages/cli/tests/utils/banner.test.ts | 11 +- packages/cli/tests/utils/cache.edge.test.ts | 83 +++++- packages/cli/tests/utils/cache.test.ts | 2 +- packages/cli/tests/utils/registry.test.ts | 143 ++++++++++ scripts/generate-registry.js | 146 ++++++++++ 13 files changed, 1108 insertions(+), 84 deletions(-) create mode 100644 .github/workflows/generate-registry.yml create mode 100644 content/registry.json create mode 100644 packages/cli/src/utils/registry.ts create mode 100644 packages/cli/tests/utils/registry.test.ts create mode 100644 scripts/generate-registry.js diff --git 
a/.github/workflows/generate-registry.yml b/.github/workflows/generate-registry.yml new file mode 100644 index 0000000..d6773a8 --- /dev/null +++ b/.github/workflows/generate-registry.yml @@ -0,0 +1,41 @@ +name: Generate Registry + +on: + push: + branches: + - main + paths: + - content/** + paths-ignore: + - content/registry.json + +permissions: + contents: write + +jobs: + generate-registry: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Generate registry artifact + run: node scripts/generate-registry.js + + - name: Commit registry updates + run: | + if git diff --quiet -- content/registry.json; then + echo "No registry changes detected" + exit 0 + fi + + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + git add content/registry.json + git commit -m "chore: regenerate registry.json" + git push diff --git a/content/registry.json b/content/registry.json new file mode 100644 index 0000000..0508828 --- /dev/null +++ b/content/registry.json @@ -0,0 +1,187 @@ +{ + "version": 1, + "generated_at": "2026-04-12T15:46:19.045Z", + "rules": [ + { + "path": "css/tailwind", + "name": "tailwind", + "description": "Utility-first Tailwind CSS conventions and design tokens", + "version": "0.1.0", + "scope": "conventions", + "tags": [ + "tailwind", + "css", + "styling" + ], + "size_bytes": 874 + }, + { + "path": "frontend/accessibility", + "name": "accessibility", + "description": "Accessibility best practices for AI coding agents", + "version": "0.1.0", + "scope": "conventions", + "tags": [ + "frontend", + "accessibility", + "a11y", + "html" + ], + "size_bytes": 2781 + }, + { + "path": "frontend/design-guidelines", + "name": "design-guidelines", + "description": "UI design principles for AI coding agents", + "version": "0.1.0", + "scope": "design", + "tags": [ + "frontend", 
+ "design", + "ui", + "ux" + ], + "size_bytes": 3476 + }, + { + "path": "frontend/performance", + "name": "performance", + "description": "Frontend performance optimization rules", + "version": "0.1.0", + "scope": "performance", + "tags": [ + "frontend", + "performance", + "core-web-vitals" + ], + "size_bytes": 2558 + }, + { + "path": "javascript/nextjs", + "name": "nextjs", + "description": "Next.js App Router patterns and React Server Components", + "version": "0.1.0", + "scope": "architecture", + "tags": [ + "nextjs", + "react", + "app-router", + "rsc" + ], + "size_bytes": 1250 + }, + { + "path": "javascript/react", + "name": "react", + "description": "React conventions and best practices for AI coding agents", + "version": "0.1.0", + "scope": "conventions", + "tags": [ + "react", + "frontend", + "components", + "hooks" + ], + "size_bytes": 1314 + }, + { + "path": "security/auth-patterns", + "name": "auth-patterns", + "description": "Authentication and authorization best practices", + "version": "0.1.0", + "scope": "security", + "tags": [ + "security", + "auth", + "authentication", + "authorization" + ], + "size_bytes": 2762 + }, + { + "path": "security/supabase-rls", + "name": "supabase-rls", + "description": "Supabase Row-Level Security enforcement and auth patterns", + "version": "0.1.0", + "scope": "security", + "tags": [ + "supabase", + "rls", + "security", + "database" + ], + "size_bytes": 958 + }, + { + "path": "testing/vitest", + "name": "vitest", + "description": "Vitest testing patterns and best practices", + "version": "0.1.0", + "scope": "testing", + "tags": [ + "vitest", + "testing", + "unit-tests" + ], + "size_bytes": 1174 + }, + { + "path": "typescript/strict", + "name": "strict", + "description": "Strict TypeScript conventions for professional codebases", + "version": "0.1.0", + "scope": "conventions", + "tags": [ + "typescript", + "strict", + "types" + ], + "size_bytes": 1124 + }, + { + "path": "workflow/debugging", + "name": "debugging", + 
"description": "Systematic debugging methodology for AI coding agents", + "version": "0.1.0", + "scope": "workflow", + "tags": [ + "workflow", + "debugging", + "methodology" + ], + "size_bytes": 2464 + }, + { + "path": "workflow/git-conventions", + "name": "git-conventions", + "description": "Git workflow and commit conventions", + "version": "0.1.0", + "scope": "workflow", + "tags": [ + "git", + "workflow", + "conventions" + ], + "size_bytes": 2010 + }, + { + "path": "workflow/spec-driven", + "name": "spec-driven", + "description": "Spec-driven development workflow: spec, plan, build, ship", + "version": "0.1.0", + "scope": "workflow", + "tags": [ + "workflow", + "spec-driven", + "methodology" + ], + "size_bytes": 1441 + } + ], + "assets": { + "commands": [], + "templates": [], + "hooks": [], + "presets": [] + } +} diff --git a/packages/cli/src/commands/add.ts b/packages/cli/src/commands/add.ts index 0fee685..26d696b 100644 --- a/packages/cli/src/commands/add.ts +++ b/packages/cli/src/commands/add.ts @@ -3,7 +3,12 @@ import { readFile, writeFile, mkdir } from 'node:fs/promises'; import type { Command } from 'commander'; import pc from 'picocolors'; import { stringify, parse } from 'yaml'; -import { fetchRawContent, fetchContent, listDirectory, listContentDirectory } from '../utils/github.js'; +import { + fetchRawContent, + fetchContent, + listContentDirectory, + fetchRegistry as fetchRegistryManifest, +} from '../utils/github.js'; import { convert } from '../core/converter.js'; import { isAssetType, parseAssetFrontmatter } from '../core/assets.js'; import { fileExists } from '../utils/fs.js'; @@ -17,7 +22,7 @@ import { spinnerTask, isInteractiveSession, } from '../utils/prompt.js'; -import * as cache from '../utils/cache.js'; +import { filterRegistryByTag, searchRegistry, type Registry, type RegistryRule } from '../utils/registry.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; import type { PulledEntry, AssetEntry, AssetType } 
from '../bridges/types.js'; @@ -32,6 +37,8 @@ export function pluralRules(count: number): string { export interface AddOptions { list?: boolean; + search?: string; + tag?: string; noCompile?: boolean; force?: boolean; dryRun?: boolean; @@ -52,71 +59,129 @@ export function validateInput(input: string): { category: string; name: string } interface CachedRegistry { categories: Array<{ name: string; - rules: Array<{ name: string; description: string }>; + rules: Array<{ name: string; description: string; version: string; path: string; tags: string[] }>; }>; + assets: Registry['assets']; } -export async function fetchRegistry(cwd: string): Promise { - const cached = await cache.getFromDisk(cwd, 'registry'); +function toCategoryName(path: string): string { + const slashIdx = path.indexOf('/'); + if (slashIdx <= 0) { + return path; + } + return path.slice(0, slashIdx); +} + +function toRuleName(path: string): string { + const slashIdx = path.indexOf('/'); + if (slashIdx < 0 || slashIdx === path.length - 1) { + return path; + } + return path.slice(slashIdx + 1); +} + +function buildCachedRegistry(registry: Registry, rules: RegistryRule[]): CachedRegistry { + const categoryMap = new Map(); + + for (const rule of rules) { + const category = toCategoryName(rule.path); + const ruleEntry = { + name: toRuleName(rule.path), + description: rule.description, + version: rule.version, + path: rule.path, + tags: rule.tags, + }; + + const existingCategory = categoryMap.get(category); + if (existingCategory) { + existingCategory.rules.push(ruleEntry); + continue; + } + + categoryMap.set(category, { + name: category, + rules: [ruleEntry], + }); + } + + const categories = [...categoryMap.values()].sort((a, b) => a.name.localeCompare(b.name)); + for (const category of categories) { + category.rules.sort((a, b) => a.name.localeCompare(b.name)); + } - if (cached) return cached; + return { + categories, + assets: registry.assets, + }; +} +export async function fetchRegistry(cwd: string): 
Promise { ui.info('Fetching available rules from GitHub...'); ui.newline(); - let topLevel; try { - topLevel = await spinnerTask({ - label: 'Fetching rule categories', - task: async () => listDirectory(), + const manifest = await spinnerTask({ + label: 'Fetching registry manifest', + task: async () => fetchRegistryManifest(cwd), }); + + return buildCachedRegistry(manifest, manifest.rules); } catch (err) { const msg = err instanceof Error ? err.message : String(err); ui.error(`Could not fetch rule registry: ${msg}`); return null; } +} - const dirs = topLevel.filter((e) => e.type === 'dir'); +function applyRuleFilters( + manifest: Registry, + searchTerm: string | undefined, + tag: string | undefined, +): RegistryRule[] { + let filtered = manifest.rules; - const categoryResults = await Promise.all( - dirs.map(async (entry) => { - try { - const files = await listDirectory(entry.name); - const ruleFiles = files.filter((f) => f.type === 'file'); - - const rules = await Promise.all( - ruleFiles.map(async (file) => { - try { - const content = await fetchRawContent(`${entry.name}/${file.name}`); - const fmMatch = /^---\n([\s\S]*?)\n---/.exec(content); - if (fmMatch?.[1]) { - const fm = parse(fmMatch[1]) as Record; - const description = typeof fm['description'] === 'string' ? fm['description'] : ''; - return { name: file.name, description }; - } - return { name: file.name, description: '' }; - } catch { - return { name: file.name, description: '' }; - } - }), - ); + if (tag && tag.trim().length > 0) { + const taggedRegistry: Registry = { + ...manifest, + rules: filtered, + }; + filtered = filterRegistryByTag(taggedRegistry, tag); + } - return rules.length > 0 ? 
{ name: entry.name, rules } : null; - } catch { - return null; - } - }), - ); + if (searchTerm && searchTerm.trim().length > 0) { + const searchedRegistry: Registry = { + ...manifest, + rules: filtered, + }; + filtered = searchRegistry(searchedRegistry, searchTerm); + } - const categories = categoryResults.filter((c): c is NonNullable => c !== null); - const registry: CachedRegistry = { categories }; - await cache.set(cwd, 'registry', registry); - return registry; + return filtered; } -async function runList(categoryFilter: string | undefined): Promise { +async function runList( + categoryFilter: string | undefined, + searchTerm: string | undefined, + tag: string | undefined, +): Promise { const cwd = process.cwd(); - const registry = await fetchRegistry(cwd); + let manifest: Registry; + + try { + manifest = await spinnerTask({ + label: 'Fetching registry manifest', + task: async () => fetchRegistryManifest(cwd), + }); + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + ui.error(`Could not fetch rule registry: ${msg}`); + process.exitCode = 1; + return; + } + + const filteredRules = applyRuleFilters(manifest, searchTerm, tag); + const registry = buildCachedRegistry(manifest, filteredRules); if (!registry) { process.exitCode = 1; @@ -128,7 +193,9 @@ async function runList(categoryFilter: string | undefined): Promise { : registry.categories; if (displayCategories.length === 0) { - if (categoryFilter) { + if (searchTerm || tag) { + ui.warn('No rules matched the applied filters'); + } else if (categoryFilter) { ui.warn(`Category "${categoryFilter}" not found`); } else { ui.warn('No rules available'); @@ -152,27 +219,23 @@ async function runList(categoryFilter: string | undefined): Promise { // Show available assets if not filtering by category if (!categoryFilter) { - const assetTypes = ['commands', 'templates', 'hooks', 'presets'] as const; - const assetResults = await Promise.allSettled( - assetTypes.map((dir) => 
listContentDirectory(dir)), - ); + const assetEntries = [ + { type: 'command', names: registry.assets.commands }, + { type: 'template', names: registry.assets.templates }, + { type: 'hook', names: registry.assets.hooks }, + { type: 'preset', names: registry.assets.presets }, + ]; - const hasAnyAssets = assetResults.some( - (r) => r.status === 'fulfilled' && r.value.some((e) => e.type === 'file'), - ); + const hasAnyAssets = assetEntries.some((entry) => entry.names.length > 0); if (hasAnyAssets) { ui.newline(); ui.header('Available assets'); ui.newline(); - for (let i = 0; i < assetTypes.length; i++) { - const type = assetTypes[i]!; - const result = assetResults[i]!; - if (result.status !== 'fulfilled') continue; - const names = result.value.filter((e) => e.type === 'file').map((e) => e.name); + for (const entry of assetEntries) { + const names = entry.names; if (names.length === 0) continue; - const singular = type.replace(/s$/, ''); - console.log(` ${pc.cyan(`${singular}/`)}`); + console.log(` ${pc.cyan(`${entry.type}/`)}`); for (const name of names) { console.log(` ${pc.white(name)}`); } @@ -260,6 +323,54 @@ function getAssetContentPath(type: AssetType, name: string): string { return `${type}s/${name}.${ext}`; } +function parseSemver(version: string): [number, number, number] | null { + const match = /^(\d+)\.(\d+)\.(\d+)(?:[-+].*)?$/.exec(version.trim()); + if (!match) { + return null; + } + + const major = Number.parseInt(match[1] ?? '', 10); + const minor = Number.parseInt(match[2] ?? '', 10); + const patch = Number.parseInt(match[3] ?? 
'', 10); + + if (Number.isNaN(major) || Number.isNaN(minor) || Number.isNaN(patch)) { + return null; + } + + return [major, minor, patch]; +} + +function compareSemver(a: string, b: string): number { + const parsedA = parseSemver(a); + const parsedB = parseSemver(b); + + if (!parsedA || !parsedB) { + return a.localeCompare(b, undefined, { numeric: true }); + } + + const [majorA, minorA, patchA] = parsedA; + const [majorB, minorB, patchB] = parsedB; + + if (majorA !== majorB) { + return majorA - majorB; + } + + if (minorA !== minorB) { + return minorA - minorB; + } + + if (patchA !== patchB) { + return patchA - patchB; + } + + return 0; +} + +interface RuleVersionCheck { + installedVersion?: string; + registryVersion?: string; +} + export async function downloadAndInstallAsset( cwd: string, type: AssetType, @@ -347,6 +458,7 @@ async function downloadAndInstall( category: string, name: string, options: AddOptions, + versionCheck?: RuleVersionCheck, ): Promise { const source = `${category}/${name}`; const fileName = `pulled-${category}-${name}.yml`; @@ -378,6 +490,36 @@ async function downloadAndInstall( } if (await fileExists(filePath)) { + const installedVersion = versionCheck?.installedVersion; + const registryVersion = versionCheck?.registryVersion; + + if (installedVersion && registryVersion) { + const comparison = compareSemver(registryVersion, installedVersion); + + if (comparison === 0) { + ui.success(`Already up to date (${source} v${registryVersion})`); + return false; + } + + if (comparison > 0 && !options.force) { + ui.newline(); + ui.info(`${source} update available (v${installedVersion} ${ICONS.arrow} v${registryVersion})`); + try { + const shouldUpdate = await confirmPrompt({ + message: 'Install update?', + defaultValue: true, + }); + if (!shouldUpdate) { + ui.error('Cancelled'); + return false; + } + } catch { + ui.error('Cancelled'); + return false; + } + } + } + try { const existingRaw = await readFile(filePath, 'utf-8'); const existingDoc = 
parse(existingRaw) as Record; @@ -740,9 +882,36 @@ export async function installPreset( return anyAdded; } +async function resolveRuleVersionCheck(cwd: string, source: string): Promise { + let installedVersion: string | undefined; + try { + const config = await readConfig(cwd); + installedVersion = config.pulled.find((entry) => entry.path === source)?.version; + } catch { + installedVersion = undefined; + } + + let registryVersion: string | undefined; + try { + const registry = await fetchRegistryManifest(cwd); + registryVersion = registry.rules.find((rule) => rule.path === source)?.version; + } catch { + registryVersion = undefined; + } + + if (!installedVersion && !registryVersion) { + return undefined; + } + + return { + installedVersion, + registryVersion, + }; +} + export async function runAdd(ruleArg: string | undefined, options: AddOptions): Promise { if (options.list) { - await runList(ruleArg); + await runList(ruleArg, options.search, options.tag); return; } @@ -813,7 +982,9 @@ export async function runAdd(ruleArg: string | undefined, options: AddOptions): return; } - const added = await downloadAndInstall(cwd, category, name, options); + const source = `${category}/${name}`; + const versionCheck = await resolveRuleVersionCheck(cwd, source); + const added = await downloadAndInstall(cwd, category, name, options, versionCheck); if (added && !options.noCompile) { const { runCompileFromAdd } = await import('./compile.js'); @@ -829,6 +1000,8 @@ export function registerAddCommand(program: Command): void { .argument('[rule]', 'Rule path: /') .description('Add rules from the dev-workflows registry') .option('--list', 'List available rules') + .option('--search ', 'Filter listed rules by search terms') + .option('--tag ', 'Filter listed rules by tag') .option('--no-compile', 'Skip auto-compile after adding') .option('--force', 'Overwrite without asking') .option('--dry-run', 'Show output without writing files') diff --git a/packages/cli/src/utils/banner.ts 
b/packages/cli/src/utils/banner.ts index 850f9d5..f2749d3 100644 --- a/packages/cli/src/utils/banner.ts +++ b/packages/cli/src/utils/banner.ts @@ -1,9 +1,10 @@ const BANNER_LINES = [ - ' ____ __ __ __ __', - '| _ \\ ___ _ _\\ \\ / /__ _ __| | __ / _| ___ __ _', - "| | | |/ _ \\ | | |\\ \\ / / _ \\| '__| |/ /| |_ / _ \\/ _` |", - '| |_| | __/ |_| | \\ \\/ / (_) | | | < | _| __/ (_| |', - '|____/ \\___|\\__,_| \\__/ \\___/|_| |_|\\_\\|_| \\___|\\__,_|', + "██████╗ ███████╗██╗ ██╗██╗ ██╗", + "██╔══██╗██╔════╝██║ ██║██║ ██║", + "██║ ██║█████╗ ██║ ██║██║ █╗ ██║", + "██║ ██║██╔══╝ ╚██╗ ██╔╝██║███╗██║", + "██████╔╝███████╗ ╚████╔╝ ╚███╔███╔╝", + "╚═════╝ ╚══════╝ ╚═══╝ ╚══╝╚══╝", ] as const; const GRADIENT_START = 45; diff --git a/packages/cli/src/utils/cache.ts b/packages/cli/src/utils/cache.ts index 411d733..aaaf373 100644 --- a/packages/cli/src/utils/cache.ts +++ b/packages/cli/src/utils/cache.ts @@ -1,5 +1,7 @@ -import { readFile, writeFile, mkdir } from 'node:fs/promises'; -import { join, dirname } from 'node:path'; +import { mkdir, readFile, writeFile } from 'node:fs/promises'; +import { dirname, join } from 'node:path'; +import * as ui from './ui.js'; +import { fileExists } from './fs.js'; const TTL_MS = 3_600_000; // 1 hour @@ -11,7 +13,32 @@ interface CacheEntry { type CacheStore = Record>; function getCachePath(cwd: string): string { - return join(cwd, '.dwf', '.cache', 'registry.json'); + return join(cwd, '.dwf', '.cache', 'registry-store.json'); +} + +function getETagDataPath(cacheDir: string, cacheKey: string): string { + return join(cacheDir, `${cacheKey}.json`); +} + +function getETagPath(cacheDir: string, cacheKey: string): string { + return join(cacheDir, `${cacheKey}.etag`); +} + +async function readJsonFile(filePath: string): Promise { + try { + const raw = await readFile(filePath, 'utf-8'); + return JSON.parse(raw) as T; + } catch { + return null; + } +} + +async function readTextFile(filePath: string): Promise { + try { + return await 
readFile(filePath, 'utf-8'); + } catch { + return null; + } } async function readStore(cwd: string): Promise { @@ -53,3 +80,64 @@ export async function set(cwd: string, key: string, value: T): Promise store[key] = { data: value, timestamp: Date.now() }; await writeStore(cwd, store); } + +export interface FetchWithETagResult { + data: T; + fromCache: boolean; +} + +export async function fetchWithETag( + url: string, + cacheDir: string, + cacheKey: string, +): Promise> { + const dataPath = getETagDataPath(cacheDir, cacheKey); + const etagPath = getETagPath(cacheDir, cacheKey); + + const headers: Record = {}; + const cachedETagRaw = await readTextFile(etagPath); + const cachedETag = typeof cachedETagRaw === 'string' ? cachedETagRaw.trim() : ''; + if (cachedETag.length > 0) { + headers['If-None-Match'] = cachedETag; + } + + try { + const response = await fetch(url, { headers }); + + if (response.status === 304) { + const cachedData = await readJsonFile(dataPath); + if (cachedData === null) { + throw new Error(`Server returned 304 for ${cacheKey} but cache file is missing`); + } + + return { data: cachedData, fromCache: true }; + } + + if (!response.ok) { + throw new Error(`Request failed for ${cacheKey} (HTTP ${String(response.status)})`); + } + + const data = (await response.json()) as T; + await mkdir(dirname(dataPath), { recursive: true }); + await writeFile(dataPath, `${JSON.stringify(data, null, 2)}\n`, 'utf-8'); + + const etag = response.headers.get('etag'); + if (etag && etag.trim().length > 0) { + await writeFile(etagPath, `${etag.trim()}\n`, 'utf-8'); + } + + return { data, fromCache: false }; + } catch (error) { + const hasDataCache = await fileExists(dataPath); + if (hasDataCache) { + const cachedData = await readJsonFile(dataPath); + if (cachedData !== null) { + ui.warn('Using cached registry data because the network request failed.'); + return { data: cachedData, fromCache: true }; + } + } + + const reason = error instanceof Error ? 
error.message : String(error); + throw new Error(`Unable to fetch ${cacheKey}: ${reason}`); + } +} diff --git a/packages/cli/src/utils/github.ts b/packages/cli/src/utils/github.ts index 996f342..a955f69 100644 --- a/packages/cli/src/utils/github.ts +++ b/packages/cli/src/utils/github.ts @@ -1,7 +1,12 @@ +import { join } from 'node:path'; +import { fetchWithETag } from './cache.js'; +import type { Registry, RegistryAssets, RegistryRule } from './registry.js'; + const BRANCH = 'main'; const REPO = 'gpolanco/dev-workflows'; const RAW_BASE = `https://raw.githubusercontent.com/${REPO}/${BRANCH}/content`; const API_BASE = `https://api.github.com/repos/${REPO}/contents/content`; +const REGISTRY_URL = `${RAW_BASE}/registry.json`; export class GitHubError extends Error { constructor( @@ -100,3 +105,108 @@ export async function listDirectory(path?: string): Promise { type: entry.type, })); } + +function isStringArray(value: unknown): value is string[] { + return Array.isArray(value) && value.every((item) => typeof item === 'string'); +} + +function parseRegistryRule(value: unknown): RegistryRule | null { + if (!value || typeof value !== 'object') { + return null; + } + + const record = value as Record; + if ( + typeof record['path'] !== 'string' || + typeof record['name'] !== 'string' || + typeof record['description'] !== 'string' || + typeof record['version'] !== 'string' || + typeof record['scope'] !== 'string' || + !isStringArray(record['tags']) || + typeof record['size_bytes'] !== 'number' + ) { + return null; + } + + return { + path: record['path'], + name: record['name'], + description: record['description'], + version: record['version'], + scope: record['scope'], + tags: record['tags'], + size_bytes: record['size_bytes'], + }; +} + +function parseRegistryAssets(value: unknown): RegistryAssets | null { + if (!value || typeof value !== 'object') { + return null; + } + + const record = value as Record; + if ( + !isStringArray(record['commands']) || + 
!isStringArray(record['templates']) || + !isStringArray(record['hooks']) || + !isStringArray(record['presets']) + ) { + return null; + } + + return { + commands: record['commands'], + templates: record['templates'], + hooks: record['hooks'], + presets: record['presets'], + }; +} + +function parseRegistry(value: unknown): Registry { + if (!value || typeof value !== 'object') { + throw new GitHubError('Invalid registry.json: expected an object', 0); + } + + const record = value as Record; + const rulesRaw = record['rules']; + if (!Array.isArray(rulesRaw)) { + throw new GitHubError('Invalid registry.json: missing rules array', 0); + } + + const rules: RegistryRule[] = []; + for (const rule of rulesRaw) { + const parsed = parseRegistryRule(rule); + if (parsed === null) { + throw new GitHubError('Invalid registry.json: rule entry has invalid shape', 0); + } + rules.push(parsed); + } + + const assets = parseRegistryAssets(record['assets']); + if (assets === null) { + throw new GitHubError('Invalid registry.json: invalid assets object', 0); + } + + if (typeof record['version'] !== 'number' || typeof record['generated_at'] !== 'string') { + throw new GitHubError('Invalid registry.json: missing version or generated_at', 0); + } + + return { + version: record['version'], + generated_at: record['generated_at'], + rules, + assets, + }; +} + +export async function fetchRegistry(cwd: string): Promise { + const cacheDir = join(cwd, '.dwf', '.cache'); + + try { + const result = await fetchWithETag(REGISTRY_URL, cacheDir, 'registry'); + return parseRegistry(result.data); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new GitHubError(`Could not fetch registry manifest: ${message}`, 0); + } +} diff --git a/packages/cli/src/utils/registry.ts b/packages/cli/src/utils/registry.ts new file mode 100644 index 0000000..b98fcff --- /dev/null +++ b/packages/cli/src/utils/registry.ts @@ -0,0 +1,59 @@ +export interface RegistryRule { + path: string; + name: string; + description: string; + version: string; + scope: string; + tags: string[]; + size_bytes: number; +} + +export interface RegistryAssets { + commands: string[]; + templates: string[]; + hooks: string[]; + presets: string[]; +} + +export interface Registry { + version: number; + generated_at: string; + rules: RegistryRule[]; + assets: RegistryAssets; +} + +export function filterRegistryByTag(registry: Registry, tag: string): RegistryRule[] { + const normalizedTag = tag.trim().toLowerCase(); + if (normalizedTag.length === 0) { + return [...registry.rules]; + } + + return registry.rules.filter((rule) => + rule.tags.some((ruleTag) => ruleTag.toLowerCase() === normalizedTag), + ); +} + +export function searchRegistry(registry: Registry, query: string): RegistryRule[] { + const terms = query + .trim() + .toLowerCase() + .split(/\s+/) + .filter((term) => term.length > 0); + + if (terms.length === 0) { + return [...registry.rules]; + } + + return registry.rules.filter((rule) => { + const searchableFields = [ + rule.name, + rule.description, + rule.path, + ...rule.tags, + ].map((field) => field.toLowerCase()); + + return terms.every((term) => + searchableFields.some((fieldValue) => fieldValue.includes(term)), + ); + }); +} diff --git a/packages/cli/tests/e2e/cli.test.ts b/packages/cli/tests/e2e/cli.test.ts index 6252b21..c3ca300 100644 --- a/packages/cli/tests/e2e/cli.test.ts +++ b/packages/cli/tests/e2e/cli.test.ts @@ -225,13 +225,13 @@ rules: assert.ok(result.stderr.includes('Invalid rule path')); }); - it('remove without pulled rules shows warning', async () => { + it('remove without args in 
non-TTY shows usage error', async () => { await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); - // Non-TTY, no args, no pulled → should warn + // Non-TTY, no args → should error with usage hint const result = await run(['remove'], tmpDir); - assert.equal(result.exitCode, 0); - assert.ok(result.stdout.includes('Nothing installed to remove')); + assert.equal(result.exitCode, 1); + assert.ok(result.stderr.includes('No rule specified')); }); it('remove with old block format exits with error', async () => { diff --git a/packages/cli/tests/utils/banner.test.ts b/packages/cli/tests/utils/banner.test.ts index 32d523c..a251068 100644 --- a/packages/cli/tests/utils/banner.test.ts +++ b/packages/cli/tests/utils/banner.test.ts @@ -35,11 +35,12 @@ describe('renderBanner', () => { }); const expected = [ - '\u001b[38;5;45m ____ __ __ __ __\u001b[0m', - '\u001b[38;5;84m| _ \\ ___ _ _\\ \\ / /__ _ __| | __ / _| ___ __ _\u001b[0m', - "\u001b[38;5;123m| | | |/ _ \\ | | |\\ \\ / / _ \\| '__| |/ /| |_ / _ \\/ _` |\u001b[0m", - '\u001b[38;5;162m| |_| | __/ |_| | \\ \\/ / (_) | | | < | _| __/ (_| |\u001b[0m', - '\u001b[38;5;201m|____/ \\___|\\__,_| \\__/ \\___/|_| |_|\\_\\|_| \\___|\\__,_|\u001b[0m', + '\u001b[38;5;45m██████╗ ███████╗██╗ ██╗██╗ ██╗\u001b[0m', + '\u001b[38;5;76m██╔══██╗██╔════╝██║ ██║██║ ██║\u001b[0m', + '\u001b[38;5;107m██║ ██║█████╗ ██║ ██║██║ █╗ ██║\u001b[0m', + '\u001b[38;5;139m██║ ██║██╔══╝ ╚██╗ ██╔╝██║███╗██║\u001b[0m', + '\u001b[38;5;170m██████╔╝███████╗ ╚████╔╝ ╚███╔███╔╝\u001b[0m', + '\u001b[38;5;201m╚═════╝ ╚══════╝ ╚═══╝ ╚══╝╚══╝\u001b[0m', ].join('\n'); assert.equal(renderBanner(), expected); diff --git a/packages/cli/tests/utils/cache.edge.test.ts b/packages/cli/tests/utils/cache.edge.test.ts index 34b83ad..6e00e88 100644 --- a/packages/cli/tests/utils/cache.edge.test.ts +++ b/packages/cli/tests/utils/cache.edge.test.ts @@ -3,7 +3,7 @@ import assert from 'node:assert/strict'; import { mkdtemp, rm, mkdir, writeFile, readFile } from 
'node:fs/promises'; import { join } from 'node:path'; import { tmpdir } from 'node:os'; -import { get, set, getFromDisk } from '../../src/utils/cache.js'; +import { get, set, getFromDisk, fetchWithETag } from '../../src/utils/cache.js'; describe('cache edge cases', () => { let tempDir: string; @@ -38,7 +38,7 @@ describe('cache edge cases', () => { }, }; await writeFile( - join(tempDir, '.dwf', '.cache', 'registry.json'), + join(tempDir, '.dwf', '.cache', 'registry-store.json'), JSON.stringify(store), 'utf-8', ); @@ -51,7 +51,7 @@ describe('cache edge cases', () => { describe('malformed store', () => { it('readStore returns {} when cache file is a JSON array', async () => { await writeFile( - join(tempDir, '.dwf', '.cache', 'registry.json'), + join(tempDir, '.dwf', '.cache', 'registry-store.json'), JSON.stringify([1, 2, 3]), 'utf-8', ); @@ -82,7 +82,7 @@ describe('cache edge cases', () => { const after = Date.now(); const raw = await readFile( - join(tempDir, '.dwf', '.cache', 'registry.json'), + join(tempDir, '.dwf', '.cache', 'registry-store.json'), 'utf-8', ); const store = JSON.parse(raw) as Record; @@ -99,4 +99,79 @@ describe('cache edge cases', () => { assert.equal(result, null); }); }); + + describe('fetchWithETag', () => { + const originalFetch = globalThis.fetch; + + afterEach(() => { + globalThis.fetch = originalFetch; + }); + + it('writes cache and etag on 200 response', async () => { + globalThis.fetch = async () => + new Response(JSON.stringify({ version: 1 }), { + status: 200, + headers: { + etag: 'W/"abc123"', + 'content-type': 'application/json', + }, + }); + + const cacheDir = join(tempDir, '.dwf', '.cache'); + const result = await fetchWithETag<{ version: number }>('https://example.com/registry.json', cacheDir, 'registry'); + + assert.equal(result.fromCache, false); + assert.deepEqual(result.data, { version: 1 }); + + const dataRaw = await readFile(join(cacheDir, 'registry.json'), 'utf-8'); + const etagRaw = await readFile(join(cacheDir, 
'registry.etag'), 'utf-8'); + assert.deepEqual(JSON.parse(dataRaw), { version: 1 }); + assert.equal(etagRaw.trim(), 'W/"abc123"'); + }); + + it('uses local cache when server returns 304', async () => { + const cacheDir = join(tempDir, '.dwf', '.cache'); + await writeFile(join(cacheDir, 'registry.json'), JSON.stringify({ cached: true }), 'utf-8'); + await writeFile(join(cacheDir, 'registry.etag'), 'W/"etag-a"\n', 'utf-8'); + + globalThis.fetch = async (_url, init) => { + const headers = (init?.headers ?? {}) as Record; + assert.equal(headers['If-None-Match'], 'W/"etag-a"'); + return new Response(null, { status: 304 }); + }; + + const result = await fetchWithETag<{ cached: boolean }>('https://example.com/registry.json', cacheDir, 'registry'); + assert.equal(result.fromCache, true); + assert.deepEqual(result.data, { cached: true }); + }); + + it('falls back to cache when network fails', async () => { + const cacheDir = join(tempDir, '.dwf', '.cache'); + await writeFile(join(cacheDir, 'registry.json'), JSON.stringify({ offline: true }), 'utf-8'); + + globalThis.fetch = async () => { + throw new Error('network down'); + }; + + const result = await fetchWithETag<{ offline: boolean }>('https://example.com/registry.json', cacheDir, 'registry'); + assert.equal(result.fromCache, true); + assert.deepEqual(result.data, { offline: true }); + }); + + it('throws clear error when no cache and request fails', async () => { + const cacheDir = join(tempDir, '.dwf', '.cache'); + + globalThis.fetch = async () => { + throw new Error('network down'); + }; + + await assert.rejects( + () => fetchWithETag('https://example.com/registry.json', cacheDir, 'registry'), + (error: Error) => { + assert.match(error.message, /Unable to fetch registry/); + return true; + }, + ); + }); + }); }); diff --git a/packages/cli/tests/utils/cache.test.ts b/packages/cli/tests/utils/cache.test.ts index d326927..716eff3 100644 --- a/packages/cli/tests/utils/cache.test.ts +++ 
b/packages/cli/tests/utils/cache.test.ts @@ -39,7 +39,7 @@ describe('cache', () => { it('handles corrupted cache file gracefully', async () => { const { writeFile } = await import('node:fs/promises'); - await writeFile(join(tempDir, '.dwf', '.cache', 'registry.json'), 'not json!!!', 'utf-8'); + await writeFile(join(tempDir, '.dwf', '.cache', 'registry-store.json'), 'not json!!!', 'utf-8'); const result = await getFromDisk(tempDir, 'key'); assert.equal(result, null); }); diff --git a/packages/cli/tests/utils/registry.test.ts b/packages/cli/tests/utils/registry.test.ts new file mode 100644 index 0000000..52da7d3 --- /dev/null +++ b/packages/cli/tests/utils/registry.test.ts @@ -0,0 +1,143 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { searchRegistry, filterRegistryByTag } from '../../src/utils/registry.js'; +import type { Registry } from '../../src/utils/registry.js'; + +function makeRegistry(rules: Array<{ path: string; name: string; description: string; tags: string[] }>): Registry { + return { + version: 1, + generated_at: '2026-04-12T00:00:00Z', + rules: rules.map((r) => ({ + ...r, + version: '0.1.0', + scope: 'conventions', + size_bytes: 100, + })), + assets: { commands: [], templates: [], hooks: [], presets: [] }, + }; +} + +const SAMPLE_REGISTRY = makeRegistry([ + { path: 'typescript/strict', name: 'Strict TypeScript', description: 'Enforce strict TypeScript conventions', tags: ['typescript', 'strict', 'types'] }, + { path: 'typescript/react', name: 'React TypeScript', description: 'TypeScript patterns for React components', tags: ['typescript', 'react', 'frontend'] }, + { path: 'security/supabase-rls', name: 'Supabase RLS', description: 'Row-level security policies for Supabase', tags: ['security', 'supabase', 'rls'] }, + { path: 'testing/unit', name: 'Unit Testing', description: 'Best practices for unit testing', tags: ['testing', 'jest', 'vitest'] }, + { path: 'workflow/git', name: 'Git Workflow', 
description: 'Git branching and commit conventions', tags: ['git', 'workflow'] }, +]); + +describe('searchRegistry', () => { + it('returns all rules for empty query', () => { + const results = searchRegistry(SAMPLE_REGISTRY, ''); + assert.equal(results.length, 5); + }); + + it('returns all rules for whitespace-only query', () => { + const results = searchRegistry(SAMPLE_REGISTRY, ' '); + assert.equal(results.length, 5); + }); + + it('matches by name', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'Supabase'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'security/supabase-rls'); + }); + + it('matches by description', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'branching'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'workflow/git'); + }); + + it('matches by path', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'typescript/strict'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'typescript/strict'); + }); + + it('matches by tag', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'vitest'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'testing/unit'); + }); + + it('is case-insensitive', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'SUPABASE'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'security/supabase-rls'); + }); + + it('supports multi-term AND search', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'typescript react'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'typescript/react'); + }); + + it('returns empty when no terms match', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'python django'); + assert.equal(results.length, 0); + }); + + it('returns empty when only one of AND terms matches', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'typescript django'); + assert.equal(results.length, 0); + }); + + it('matches multiple rules', 
() => { + const results = searchRegistry(SAMPLE_REGISTRY, 'typescript'); + assert.equal(results.length, 2); + const paths = results.map((r) => r.path); + assert.ok(paths.includes('typescript/strict')); + assert.ok(paths.includes('typescript/react')); + }); + + it('handles empty registry', () => { + const empty = makeRegistry([]); + const results = searchRegistry(empty, 'anything'); + assert.equal(results.length, 0); + }); +}); + +describe('filterRegistryByTag', () => { + it('returns all rules for empty tag', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, ''); + assert.equal(results.length, 5); + }); + + it('returns all rules for whitespace-only tag', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, ' '); + assert.equal(results.length, 5); + }); + + it('filters by exact tag match', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, 'security'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'security/supabase-rls'); + }); + + it('is case-insensitive', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, 'REACT'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'typescript/react'); + }); + + it('returns multiple matches', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, 'typescript'); + assert.equal(results.length, 2); + }); + + it('returns empty for non-existent tag', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, 'python'); + assert.equal(results.length, 0); + }); + + it('does not partial-match tags', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, 'type'); + assert.equal(results.length, 0); + }); + + it('handles empty registry', () => { + const empty = makeRegistry([]); + const results = filterRegistryByTag(empty, 'anything'); + assert.equal(results.length, 0); + }); +}); diff --git a/scripts/generate-registry.js b/scripts/generate-registry.js new file mode 100644 index 0000000..157edbf --- /dev/null +++ 
b/scripts/generate-registry.js @@ -0,0 +1,146 @@ +#!/usr/bin/env node + +import { readdir, readFile, stat, writeFile } from 'node:fs/promises'; +import { join, relative } from 'node:path'; + +const ROOT_DIR = process.cwd(); +const RULES_DIR = join(ROOT_DIR, 'content', 'rules'); +const OUTPUT_PATH = join(ROOT_DIR, 'content', 'registry.json'); + +function parseScalar(value) { + const trimmed = value.trim(); + + if ((trimmed.startsWith('"') && trimmed.endsWith('"')) || (trimmed.startsWith("'") && trimmed.endsWith("'"))) { + return trimmed.slice(1, -1); + } + + return trimmed; +} + +function parseTags(raw) { + const value = raw.trim(); + if (!value.startsWith('[') || !value.endsWith(']')) { + return []; + } + + const inner = value.slice(1, -1).trim(); + if (inner.length === 0) { + return []; + } + + return inner + .split(',') + .map((entry) => parseScalar(entry)) + .filter((entry) => entry.length > 0); +} + +function parseFrontmatter(markdown) { + const match = /^---\n([\s\S]*?)\n---/.exec(markdown); + if (!match || !match[1]) { + return null; + } + + const metadata = {}; + for (const line of match[1].split('\n')) { + const trimmed = line.trim(); + if (trimmed.length === 0 || trimmed.startsWith('#')) { + continue; + } + + const separatorIndex = trimmed.indexOf(':'); + if (separatorIndex < 1) { + continue; + } + + const key = trimmed.slice(0, separatorIndex).trim(); + const rawValue = trimmed.slice(separatorIndex + 1).trim(); + + if (key === 'tags') { + metadata.tags = parseTags(rawValue); + continue; + } + + metadata[key] = parseScalar(rawValue); + } + + return metadata; +} + +async function collectMarkdownFiles(dir) { + const entries = await readdir(dir, { withFileTypes: true }); + const files = []; + + for (const entry of entries) { + const absolutePath = join(dir, entry.name); + + if (entry.isDirectory()) { + const nested = await collectMarkdownFiles(absolutePath); + files.push(...nested); + continue; + } + + if (entry.isFile() && entry.name.endsWith('.md')) { + 
files.push(absolutePath); + } + } + + return files; +} + +function normalizeRulePath(absolutePath) { + const relativePath = relative(RULES_DIR, absolutePath).replaceAll('\\', '/'); + return relativePath.replace(/\.md$/, ''); +} + +async function buildRegistry() { + const markdownFiles = await collectMarkdownFiles(RULES_DIR); + const rules = []; + + for (const filePath of markdownFiles) { + if (filePath.endsWith('/README.md')) { + continue; + } + + const markdown = await readFile(filePath, 'utf-8'); + const frontmatter = parseFrontmatter(markdown); + if (!frontmatter) { + continue; + } + + const fileStats = await stat(filePath); + rules.push({ + path: normalizeRulePath(filePath), + name: typeof frontmatter.name === 'string' ? frontmatter.name : '', + description: typeof frontmatter.description === 'string' ? frontmatter.description : '', + version: typeof frontmatter.version === 'string' ? frontmatter.version : '', + scope: typeof frontmatter.scope === 'string' ? frontmatter.scope : '', + tags: Array.isArray(frontmatter.tags) ? frontmatter.tags : [], + size_bytes: fileStats.size, + }); + } + + rules.sort((a, b) => a.path.localeCompare(b.path)); + + return { + version: 1, + generated_at: new Date().toISOString(), + rules, + assets: { + commands: [], + templates: [], + hooks: [], + presets: [], + }, + }; +} + +async function main() { + const registry = await buildRegistry(); + await writeFile(OUTPUT_PATH, `${JSON.stringify(registry, null, 2)}\n`, 'utf-8'); + console.log(`Generated ${registry.rules.length} rules in content/registry.json`); +} + +main().catch((error) => { + console.error(error instanceof Error ? 
error.message : String(error)); + process.exitCode = 1; +}); From 35efaa95f3e5edc596e859c10fede56e7b8e3d70 Mon Sep 17 00:00:00 2001 From: Geordano Polanco Date: Sun, 12 Apr 2026 18:55:37 +0200 Subject: [PATCH 18/18] fix(banner): replace rainbow gradient with subtle gray gradient MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Match skills.sh style: ANSI 256-color grays (252→240) instead of cyan-to-magenta rainbow (45→201). --- packages/cli/src/utils/banner.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/cli/src/utils/banner.ts b/packages/cli/src/utils/banner.ts index f2749d3..4629cce 100644 --- a/packages/cli/src/utils/banner.ts +++ b/packages/cli/src/utils/banner.ts @@ -7,8 +7,8 @@ const BANNER_LINES = [ "╚═════╝ ╚══════╝ ╚═══╝ ╚══╝╚══╝", ] as const; -const GRADIENT_START = 45; -const GRADIENT_END = 201; +const GRADIENT_START = 252; +const GRADIENT_END = 240; function colorizeLine(line: string, color: number): string { return `\u001b[38;5;${String(color)}m${line}\u001b[0m`;