diff --git a/.github/workflows/generate-registry.yml b/.github/workflows/generate-registry.yml new file mode 100644 index 0000000..d6773a8 --- /dev/null +++ b/.github/workflows/generate-registry.yml @@ -0,0 +1,41 @@ +name: Generate Registry + +on: + push: + branches: + - main + paths: + - content/** + paths-ignore: + - content/registry.json + +permissions: + contents: write + +jobs: + generate-registry: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Generate registry artifact + run: node scripts/generate-registry.js + + - name: Commit registry updates + run: | + if git diff --quiet -- content/registry.json; then + echo "No registry changes detected" + exit 0 + fi + + git config user.name "github-actions[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + git add content/registry.json + git commit -m "chore: regenerate registry.json" + git push diff --git a/content/registry.json b/content/registry.json new file mode 100644 index 0000000..0508828 --- /dev/null +++ b/content/registry.json @@ -0,0 +1,187 @@ +{ + "version": 1, + "generated_at": "2026-04-12T15:46:19.045Z", + "rules": [ + { + "path": "css/tailwind", + "name": "tailwind", + "description": "Utility-first Tailwind CSS conventions and design tokens", + "version": "0.1.0", + "scope": "conventions", + "tags": [ + "tailwind", + "css", + "styling" + ], + "size_bytes": 874 + }, + { + "path": "frontend/accessibility", + "name": "accessibility", + "description": "Accessibility best practices for AI coding agents", + "version": "0.1.0", + "scope": "conventions", + "tags": [ + "frontend", + "accessibility", + "a11y", + "html" + ], + "size_bytes": 2781 + }, + { + "path": "frontend/design-guidelines", + "name": "design-guidelines", + "description": "UI design principles for AI coding agents", + "version": "0.1.0", + "scope": "design", + "tags": [ + 
"frontend", + "design", + "ui", + "ux" + ], + "size_bytes": 3476 + }, + { + "path": "frontend/performance", + "name": "performance", + "description": "Frontend performance optimization rules", + "version": "0.1.0", + "scope": "performance", + "tags": [ + "frontend", + "performance", + "core-web-vitals" + ], + "size_bytes": 2558 + }, + { + "path": "javascript/nextjs", + "name": "nextjs", + "description": "Next.js App Router patterns and React Server Components", + "version": "0.1.0", + "scope": "architecture", + "tags": [ + "nextjs", + "react", + "app-router", + "rsc" + ], + "size_bytes": 1250 + }, + { + "path": "javascript/react", + "name": "react", + "description": "React conventions and best practices for AI coding agents", + "version": "0.1.0", + "scope": "conventions", + "tags": [ + "react", + "frontend", + "components", + "hooks" + ], + "size_bytes": 1314 + }, + { + "path": "security/auth-patterns", + "name": "auth-patterns", + "description": "Authentication and authorization best practices", + "version": "0.1.0", + "scope": "security", + "tags": [ + "security", + "auth", + "authentication", + "authorization" + ], + "size_bytes": 2762 + }, + { + "path": "security/supabase-rls", + "name": "supabase-rls", + "description": "Supabase Row-Level Security enforcement and auth patterns", + "version": "0.1.0", + "scope": "security", + "tags": [ + "supabase", + "rls", + "security", + "database" + ], + "size_bytes": 958 + }, + { + "path": "testing/vitest", + "name": "vitest", + "description": "Vitest testing patterns and best practices", + "version": "0.1.0", + "scope": "testing", + "tags": [ + "vitest", + "testing", + "unit-tests" + ], + "size_bytes": 1174 + }, + { + "path": "typescript/strict", + "name": "strict", + "description": "Strict TypeScript conventions for professional codebases", + "version": "0.1.0", + "scope": "conventions", + "tags": [ + "typescript", + "strict", + "types" + ], + "size_bytes": 1124 + }, + { + "path": "workflow/debugging", + "name": 
"debugging", + "description": "Systematic debugging methodology for AI coding agents", + "version": "0.1.0", + "scope": "workflow", + "tags": [ + "workflow", + "debugging", + "methodology" + ], + "size_bytes": 2464 + }, + { + "path": "workflow/git-conventions", + "name": "git-conventions", + "description": "Git workflow and commit conventions", + "version": "0.1.0", + "scope": "workflow", + "tags": [ + "git", + "workflow", + "conventions" + ], + "size_bytes": 2010 + }, + { + "path": "workflow/spec-driven", + "name": "spec-driven", + "description": "Spec-driven development workflow: spec, plan, build, ship", + "version": "0.1.0", + "scope": "workflow", + "tags": [ + "workflow", + "spec-driven", + "methodology" + ], + "size_bytes": 1441 + } + ], + "assets": { + "commands": [], + "templates": [], + "hooks": [], + "presets": [] + } +} diff --git a/packages/cli/package.json b/packages/cli/package.json index 1548aaf..804b313 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -52,11 +52,11 @@ "test:e2e": "tsc && tsc -p tsconfig.test.json && find .test-build/tests/e2e -name '*.test.js' -exec node --test {} +" }, "dependencies": { - "@inquirer/prompts": "^7.0.0", + "@clack/prompts": "^0.9.0", "chokidar": "^3.6.0", "commander": "^13.0.0", "yaml": "^2.7.0", - "chalk": "^5.4.0" + "picocolors": "^1.1.0" }, "devDependencies": { "typescript": "^5.7.0", diff --git a/packages/cli/src/bridges/claude.ts b/packages/cli/src/bridges/claude.ts index 42befe8..cf858ca 100644 --- a/packages/cli/src/bridges/claude.ts +++ b/packages/cli/src/bridges/claude.ts @@ -1,28 +1,51 @@ -import type { Bridge, Rule, ProjectConfig } from './types.js'; +import type { DirectoryBridge, Rule, ProjectConfig, ScopeMetadata } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; +import { scopeToFilename } from '../core/scope-filename.js'; + +const GENERATED_COMMENT = ''; + +function buildFrontmatter(metadata?: ScopeMetadata): string { + if 
(!metadata?.paths || metadata.paths.length === 0) { + return ''; + } -function buildMarkdown(rules: Rule[]): string { const lines: string[] = [ - '# Project Rules', + '---', + 'paths:', ]; + for (const p of metadata.paths) { + lines.push(` - "${p}"`); + } + lines.push('---'); - const filtered = filterRules(rules); - const grouped = groupByScope(filtered); - - for (const [scope, scopeRules] of grouped) { - lines.push('', `## ${formatScopeHeading(scope)}`); - lines.push(''); - for (const rule of scopeRules) { - const contentLines = rule.content.split('\n'); - const first = contentLines[0]; - if (first !== undefined) { - lines.push(`- ${first}`); - } - for (let i = 1; i < contentLines.length; i++) { - const line = contentLines[i]; - if (line !== undefined) { - lines.push(` ${line}`); - } + return lines.join('\n'); +} + +function buildScopeMarkdown(scope: string, rules: Rule[]): string { + const lines: string[] = []; + + // Get metadata from the first rule in the scope (all rules in a scope share metadata) + const metadata = rules[0]?.metadata; + const frontmatter = buildFrontmatter(metadata); + + if (frontmatter) { + lines.push(frontmatter); + } + + lines.push(GENERATED_COMMENT); + lines.push(`# ${formatScopeHeading(scope)}`); + lines.push(''); + + for (const rule of rules) { + const contentLines = rule.content.split('\n'); + const first = contentLines[0]; + if (first !== undefined) { + lines.push(`- ${first}`); + } + for (let i = 1; i < contentLines.length; i++) { + const line = contentLines[i]; + if (line !== undefined) { + lines.push(` ${line}`); } } } @@ -31,14 +54,25 @@ function buildMarkdown(rules: Rule[]): string { return lines.join('\n'); } -export const claudeBridge: Bridge = { +export const claudeBridge: DirectoryBridge = { id: 'claude', - outputPaths: ['CLAUDE.md'], - usesMarkers: true, + kind: 'directory', + outputDir: '.claude/rules', + filePrefix: 'dwf-', + fileExtension: '.md', compile(rules: Rule[], _config: ProjectConfig): Map { const output = new 
Map(); - output.set('CLAUDE.md', buildMarkdown(rules)); + + const filtered = filterRules(rules); + const grouped = groupByScope(filtered); + + for (const [scope, scopeRules] of grouped) { + const filename = scopeToFilename(scope, 'dwf-', '.md'); + const key = `.claude/rules/${filename}`; + output.set(key, buildScopeMarkdown(scope, scopeRules)); + } + return output; }, }; diff --git a/packages/cli/src/bridges/copilot.ts b/packages/cli/src/bridges/copilot.ts index 5861809..1b886d6 100644 --- a/packages/cli/src/bridges/copilot.ts +++ b/packages/cli/src/bridges/copilot.ts @@ -1,4 +1,4 @@ -import type { Bridge, Rule, ProjectConfig } from './types.js'; +import type { MarkerBridge, Rule, ProjectConfig } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; function buildMarkdown(rules: Rule[]): string { @@ -31,8 +31,9 @@ function buildMarkdown(rules: Rule[]): string { return lines.join('\n'); } -export const copilotBridge: Bridge = { +export const copilotBridge: MarkerBridge = { id: 'copilot', + kind: 'marker', outputPaths: ['.github/copilot-instructions.md'], usesMarkers: true, diff --git a/packages/cli/src/bridges/cursor.ts b/packages/cli/src/bridges/cursor.ts index 8de9656..ecf557d 100644 --- a/packages/cli/src/bridges/cursor.ts +++ b/packages/cli/src/bridges/cursor.ts @@ -1,36 +1,48 @@ -import type { Bridge, Rule, ProjectConfig } from './types.js'; +import type { DirectoryBridge, Rule, ProjectConfig, ScopeMetadata } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; +import { scopeToFilename } from '../core/scope-filename.js'; -const FRONTMATTER = `--- -description: Project rules generated by dev-workflows -globs: -alwaysApply: true ----`; -function buildMdc(rules: Rule[]): string { - const lines: string[] = [ - FRONTMATTER, - '', - '', - ]; - - const filtered = filterRules(rules); - const grouped = groupByScope(filtered); - - for (const [scope, scopeRules] of 
grouped) { - lines.push('', `## ${formatScopeHeading(scope)}`); - lines.push(''); - for (const rule of scopeRules) { - const contentLines = rule.content.split('\n'); - const first = contentLines[0]; - if (first !== undefined) { - lines.push(`- ${first}`); - } - for (let i = 1; i < contentLines.length; i++) { - const line = contentLines[i]; - if (line !== undefined) { - lines.push(` ${line}`); - } +const GENERATED_COMMENT = ''; + +function buildFrontmatter(scope: string, metadata?: ScopeMetadata): string { + const hasGlobs = metadata?.globs && metadata.globs.length > 0; + const description = `${formatScopeHeading(scope)} rules generated by dev-workflows`; + + const lines: string[] = ['---']; + lines.push(`description: ${description}`); + + if (hasGlobs && metadata?.globs) { + const globsStr = metadata.globs.map((g) => `"${g}"`).join(', '); + lines.push(`globs: [${globsStr}]`); + lines.push('alwaysApply: false'); + } else { + lines.push('globs:'); + lines.push('alwaysApply: true'); + } + + lines.push('---'); + return lines.join('\n'); +} + +function buildScopeMdc(scope: string, rules: Rule[]): string { + const lines: string[] = []; + + const metadata = rules[0]?.metadata; + lines.push(buildFrontmatter(scope, metadata)); + lines.push(GENERATED_COMMENT); + lines.push(`# ${formatScopeHeading(scope)}`); + lines.push(''); + + for (const rule of rules) { + const contentLines = rule.content.split('\n'); + const first = contentLines[0]; + if (first !== undefined) { + lines.push(`- ${first}`); + } + for (let i = 1; i < contentLines.length; i++) { + const line = contentLines[i]; + if (line !== undefined) { + lines.push(` ${line}`); } } } @@ -39,14 +51,25 @@ function buildMdc(rules: Rule[]): string { return lines.join('\n'); } -export const cursorBridge: Bridge = { +export const cursorBridge: DirectoryBridge = { id: 'cursor', - outputPaths: ['.cursor/rules/devworkflows.mdc'], - usesMarkers: false, + kind: 'directory', + outputDir: '.cursor/rules', + filePrefix: 'dwf-', + 
fileExtension: '.mdc', compile(rules: Rule[], _config: ProjectConfig): Map { const output = new Map(); - output.set('.cursor/rules/devworkflows.mdc', buildMdc(rules)); + + const filtered = filterRules(rules); + const grouped = groupByScope(filtered); + + for (const [scope, scopeRules] of grouped) { + const filename = scopeToFilename(scope, 'dwf-', '.mdc'); + const key = `.cursor/rules/${filename}`; + output.set(key, buildScopeMdc(scope, scopeRules)); + } + return output; }, }; diff --git a/packages/cli/src/bridges/gemini.ts b/packages/cli/src/bridges/gemini.ts index 34dfdc1..3eb4008 100644 --- a/packages/cli/src/bridges/gemini.ts +++ b/packages/cli/src/bridges/gemini.ts @@ -1,4 +1,4 @@ -import type { Bridge, Rule, ProjectConfig } from './types.js'; +import type { MarkerBridge, Rule, ProjectConfig } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; function buildMarkdown(rules: Rule[]): string { @@ -31,8 +31,9 @@ function buildMarkdown(rules: Rule[]): string { return lines.join('\n'); } -export const geminiBridge: Bridge = { +export const geminiBridge: MarkerBridge = { id: 'gemini', + kind: 'marker', outputPaths: ['GEMINI.md'], usesMarkers: true, diff --git a/packages/cli/src/bridges/types.ts b/packages/cli/src/bridges/types.ts index 0f878b1..a20b5f5 100644 --- a/packages/cli/src/bridges/types.ts +++ b/packages/cli/src/bridges/types.ts @@ -1,3 +1,9 @@ +export interface ScopeMetadata { + globs?: string[]; + paths?: string[]; + trigger?: 'always' | 'glob' | 'manual'; +} + export interface Rule { id: string; scope: string; @@ -7,6 +13,7 @@ export interface Rule { enabled: boolean; sourceBlock?: string; source?: string; + metadata?: ScopeMetadata; } export interface PulledEntry { @@ -26,6 +33,7 @@ export interface ProjectConfig { blocks: string[]; pulled: PulledEntry[]; assets: AssetEntry[]; + global: boolean; } export const ASSET_TYPE = { @@ -43,9 +51,38 @@ export interface AssetEntry { installed_at: string; } 
-export interface Bridge { +interface BaseBridge { id: string; - outputPaths: string[]; - usesMarkers: boolean; compile(rules: Rule[], config: ProjectConfig): Map; } + +export interface DirectoryBridge extends BaseBridge { + kind: 'directory'; + outputDir: string; + filePrefix: string; + fileExtension: string; +} + +export interface MarkerBridge extends BaseBridge { + kind: 'marker'; + outputPaths: string[]; + usesMarkers: true; +} + +export type Bridge = DirectoryBridge | MarkerBridge; + +export function isDirectoryBridge(bridge: Bridge): bridge is DirectoryBridge { + return bridge.kind === 'directory'; +} + +export function isMarkerBridge(bridge: Bridge): bridge is MarkerBridge { + return bridge.kind === 'marker'; +} + +/** Get the known output paths for a bridge (for MarkerBridge returns outputPaths, for DirectoryBridge returns empty since paths are dynamic). */ +export function getBridgeOutputPaths(bridge: Bridge): string[] { + if (isMarkerBridge(bridge)) { + return bridge.outputPaths; + } + return []; +} diff --git a/packages/cli/src/bridges/windsurf.ts b/packages/cli/src/bridges/windsurf.ts index 49b7d5c..96177c9 100644 --- a/packages/cli/src/bridges/windsurf.ts +++ b/packages/cli/src/bridges/windsurf.ts @@ -1,32 +1,45 @@ -import type { Bridge, Rule, ProjectConfig } from './types.js'; +import type { DirectoryBridge, Rule, ProjectConfig, ScopeMetadata } from './types.js'; import { filterRules, groupByScope, formatScopeHeading } from '../core/helpers.js'; +import { scopeToFilename } from '../core/scope-filename.js'; const WINDSURF_CHAR_LIMIT = 6000; +const GENERATED_COMMENT = ''; -function buildMarkdown(rules: Rule[]): string { - const lines: string[] = [ - '', - '', - '# Project Rules', - ]; - - const filtered = filterRules(rules); - const grouped = groupByScope(filtered); - - for (const [scope, scopeRules] of grouped) { - lines.push('', `## ${formatScopeHeading(scope)}`); - lines.push(''); - for (const rule of scopeRules) { - const contentLines = 
rule.content.split('\n'); - const first = contentLines[0]; - if (first !== undefined) { - lines.push(`- ${first}`); - } - for (let i = 1; i < contentLines.length; i++) { - const line = contentLines[i]; - if (line !== undefined) { - lines.push(` ${line}`); - } +function buildFrontmatter(metadata?: ScopeMetadata): string { + const trigger = metadata?.trigger ?? 'always'; + const hasGlobs = metadata?.globs && metadata.globs.length > 0; + + const lines: string[] = ['---']; + lines.push(`trigger: ${trigger}`); + + if (hasGlobs && trigger === 'glob' && metadata?.globs) { + const globsStr = metadata.globs.map((g) => `"${g}"`).join(', '); + lines.push(`globs: [${globsStr}]`); + } + + lines.push('---'); + return lines.join('\n'); +} + +function buildScopeMarkdown(scope: string, rules: Rule[]): string { + const lines: string[] = []; + + const metadata = rules[0]?.metadata; + lines.push(buildFrontmatter(metadata)); + lines.push(GENERATED_COMMENT); + lines.push(`# ${formatScopeHeading(scope)}`); + lines.push(''); + + for (const rule of rules) { + const contentLines = rule.content.split('\n'); + const first = contentLines[0]; + if (first !== undefined) { + lines.push(`- ${first}`); + } + for (let i = 1; i < contentLines.length; i++) { + const line = contentLines[i]; + if (line !== undefined) { + lines.push(` ${line}`); } } } @@ -36,21 +49,32 @@ function buildMarkdown(rules: Rule[]): string { if (content.length > WINDSURF_CHAR_LIMIT) { console.warn( - `Warning: Windsurf output is ${String(content.length)} chars (limit: ${String(WINDSURF_CHAR_LIMIT)}). Windsurf may truncate the content.`, + `Warning: Windsurf file for scope "${scope}" is ${String(content.length)} chars (limit: ${String(WINDSURF_CHAR_LIMIT)}). 
Windsurf may truncate the content.`, ); } return content; } -export const windsurfBridge: Bridge = { +export const windsurfBridge: DirectoryBridge = { id: 'windsurf', - outputPaths: ['.windsurf/rules/devworkflows.md'], - usesMarkers: false, + kind: 'directory', + outputDir: '.windsurf/rules', + filePrefix: 'dwf-', + fileExtension: '.md', compile(rules: Rule[], _config: ProjectConfig): Map { const output = new Map(); - output.set('.windsurf/rules/devworkflows.md', buildMarkdown(rules)); + + const filtered = filterRules(rules); + const grouped = groupByScope(filtered); + + for (const [scope, scopeRules] of grouped) { + const filename = scopeToFilename(scope, 'dwf-', '.md'); + const key = `.windsurf/rules/${filename}`; + output.set(key, buildScopeMarkdown(scope, scopeRules)); + } + return output; }, }; diff --git a/packages/cli/src/commands/add.ts b/packages/cli/src/commands/add.ts index bbc75ad..26d696b 100644 --- a/packages/cli/src/commands/add.ts +++ b/packages/cli/src/commands/add.ts @@ -1,15 +1,28 @@ import { join } from 'node:path'; import { readFile, writeFile, mkdir } from 'node:fs/promises'; import type { Command } from 'commander'; -import chalk from 'chalk'; +import pc from 'picocolors'; import { stringify, parse } from 'yaml'; -import { select, checkbox, confirm } from '@inquirer/prompts'; -import { fetchRawContent, fetchContent, listDirectory, listContentDirectory } from '../utils/github.js'; +import { + fetchRawContent, + fetchContent, + listContentDirectory, + fetchRegistry as fetchRegistryManifest, +} from '../utils/github.js'; import { convert } from '../core/converter.js'; import { isAssetType, parseAssetFrontmatter } from '../core/assets.js'; import { fileExists } from '../utils/fs.js'; import { readConfig } from '../core/parser.js'; -import * as cache from '../utils/cache.js'; +import { + selectPrompt, + multiselectPrompt, + confirmPrompt, + introPrompt, + outroPrompt, + spinnerTask, + isInteractiveSession, +} from '../utils/prompt.js'; +import { 
filterRegistryByTag, searchRegistry, type Registry, type RegistryRule } from '../utils/registry.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; import type { PulledEntry, AssetEntry, AssetType } from '../bridges/types.js'; @@ -24,6 +37,8 @@ export function pluralRules(count: number): string { export interface AddOptions { list?: boolean; + search?: string; + tag?: string; noCompile?: boolean; force?: boolean; dryRun?: boolean; @@ -44,68 +59,129 @@ export function validateInput(input: string): { category: string; name: string } interface CachedRegistry { categories: Array<{ name: string; - rules: Array<{ name: string; description: string }>; + rules: Array<{ name: string; description: string; version: string; path: string; tags: string[] }>; }>; + assets: Registry['assets']; } -export async function fetchRegistry(cwd: string): Promise { - const cached = await cache.getFromDisk(cwd, 'registry'); +function toCategoryName(path: string): string { + const slashIdx = path.indexOf('/'); + if (slashIdx <= 0) { + return path; + } + return path.slice(0, slashIdx); +} + +function toRuleName(path: string): string { + const slashIdx = path.indexOf('/'); + if (slashIdx < 0 || slashIdx === path.length - 1) { + return path; + } + return path.slice(slashIdx + 1); +} + +function buildCachedRegistry(registry: Registry, rules: RegistryRule[]): CachedRegistry { + const categoryMap = new Map(); + + for (const rule of rules) { + const category = toCategoryName(rule.path); + const ruleEntry = { + name: toRuleName(rule.path), + description: rule.description, + version: rule.version, + path: rule.path, + tags: rule.tags, + }; + + const existingCategory = categoryMap.get(category); + if (existingCategory) { + existingCategory.rules.push(ruleEntry); + continue; + } + + categoryMap.set(category, { + name: category, + rules: [ruleEntry], + }); + } - if (cached) return cached; + const categories = [...categoryMap.values()].sort((a, b) => 
a.name.localeCompare(b.name)); + for (const category of categories) { + category.rules.sort((a, b) => a.name.localeCompare(b.name)); + } + + return { + categories, + assets: registry.assets, + }; +} +export async function fetchRegistry(cwd: string): Promise { ui.info('Fetching available rules from GitHub...'); ui.newline(); - let topLevel; try { - topLevel = await listDirectory(); + const manifest = await spinnerTask({ + label: 'Fetching registry manifest', + task: async () => fetchRegistryManifest(cwd), + }); + + return buildCachedRegistry(manifest, manifest.rules); } catch (err) { const msg = err instanceof Error ? err.message : String(err); ui.error(`Could not fetch rule registry: ${msg}`); return null; } +} - const dirs = topLevel.filter((e) => e.type === 'dir'); +function applyRuleFilters( + manifest: Registry, + searchTerm: string | undefined, + tag: string | undefined, +): RegistryRule[] { + let filtered = manifest.rules; - const categoryResults = await Promise.all( - dirs.map(async (entry) => { - try { - const files = await listDirectory(entry.name); - const ruleFiles = files.filter((f) => f.type === 'file'); - - const rules = await Promise.all( - ruleFiles.map(async (file) => { - try { - const content = await fetchRawContent(`${entry.name}/${file.name}`); - const fmMatch = /^---\n([\s\S]*?)\n---/.exec(content); - if (fmMatch?.[1]) { - const fm = parse(fmMatch[1]) as Record; - const description = typeof fm['description'] === 'string' ? fm['description'] : ''; - return { name: file.name, description }; - } - return { name: file.name, description: '' }; - } catch { - return { name: file.name, description: '' }; - } - }), - ); + if (tag && tag.trim().length > 0) { + const taggedRegistry: Registry = { + ...manifest, + rules: filtered, + }; + filtered = filterRegistryByTag(taggedRegistry, tag); + } - return rules.length > 0 ? 
{ name: entry.name, rules } : null; - } catch { - return null; - } - }), - ); + if (searchTerm && searchTerm.trim().length > 0) { + const searchedRegistry: Registry = { + ...manifest, + rules: filtered, + }; + filtered = searchRegistry(searchedRegistry, searchTerm); + } - const categories = categoryResults.filter((c): c is NonNullable => c !== null); - const registry: CachedRegistry = { categories }; - await cache.set(cwd, 'registry', registry); - return registry; + return filtered; } -async function runList(categoryFilter: string | undefined): Promise { +async function runList( + categoryFilter: string | undefined, + searchTerm: string | undefined, + tag: string | undefined, +): Promise { const cwd = process.cwd(); - const registry = await fetchRegistry(cwd); + let manifest: Registry; + + try { + manifest = await spinnerTask({ + label: 'Fetching registry manifest', + task: async () => fetchRegistryManifest(cwd), + }); + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + ui.error(`Could not fetch rule registry: ${msg}`); + process.exitCode = 1; + return; + } + + const filteredRules = applyRuleFilters(manifest, searchTerm, tag); + const registry = buildCachedRegistry(manifest, filteredRules); if (!registry) { process.exitCode = 1; @@ -117,7 +193,9 @@ async function runList(categoryFilter: string | undefined): Promise { : registry.categories; if (displayCategories.length === 0) { - if (categoryFilter) { + if (searchTerm || tag) { + ui.warn('No rules matched the applied filters'); + } else if (categoryFilter) { ui.warn(`Category "${categoryFilter}" not found`); } else { ui.warn('No rules available'); @@ -129,45 +207,41 @@ async function runList(categoryFilter: string | undefined): Promise { ui.newline(); for (const category of displayCategories) { - console.log(` ${chalk.cyan(`${category.name}/`)}`); + console.log(` ${pc.cyan(`${category.name}/`)}`); for (const rule of category.rules) { - const desc = rule.description ? 
chalk.dim(` ${rule.description}`) : ''; - console.log(` ${chalk.white(rule.name.padEnd(20))}${desc}`); + const desc = rule.description ? pc.dim(` ${rule.description}`) : ''; + console.log(` ${pc.white(rule.name.padEnd(20))}${desc}`); } ui.newline(); } - console.log(` ${chalk.dim(`Add a rule: devw add /`)}`); + console.log(` ${pc.dim(`Add a rule: devw add /`)}`); // Show available assets if not filtering by category if (!categoryFilter) { - const assetTypes = ['commands', 'templates', 'hooks', 'presets'] as const; - const assetResults = await Promise.allSettled( - assetTypes.map((dir) => listContentDirectory(dir)), - ); + const assetEntries = [ + { type: 'command', names: registry.assets.commands }, + { type: 'template', names: registry.assets.templates }, + { type: 'hook', names: registry.assets.hooks }, + { type: 'preset', names: registry.assets.presets }, + ]; - const hasAnyAssets = assetResults.some( - (r) => r.status === 'fulfilled' && r.value.some((e) => e.type === 'file'), - ); + const hasAnyAssets = assetEntries.some((entry) => entry.names.length > 0); if (hasAnyAssets) { ui.newline(); ui.header('Available assets'); ui.newline(); - for (let i = 0; i < assetTypes.length; i++) { - const type = assetTypes[i]!; - const result = assetResults[i]!; - if (result.status !== 'fulfilled') continue; - const names = result.value.filter((e) => e.type === 'file').map((e) => e.name); + for (const entry of assetEntries) { + const names = entry.names; if (names.length === 0) continue; - const singular = type.replace(/s$/, ''); - console.log(` ${chalk.cyan(`${singular}/`)}`); + console.log(` ${pc.cyan(`${entry.type}/`)}`); for (const name of names) { - console.log(` ${chalk.white(name)}`); + console.log(` ${pc.white(name)}`); } ui.newline(); } - console.log(` ${chalk.dim(`Add an asset: devw add command/`)}`); + console.log(` ${pc.dim(`Add an asset: devw add command/`)}`); } } } @@ -249,6 +323,54 @@ function getAssetContentPath(type: AssetType, name: string): string { return 
`${type}s/${name}.${ext}`; } +function parseSemver(version: string): [number, number, number] | null { + const match = /^(\d+)\.(\d+)\.(\d+)(?:[-+].*)?$/.exec(version.trim()); + if (!match) { + return null; + } + + const major = Number.parseInt(match[1] ?? '', 10); + const minor = Number.parseInt(match[2] ?? '', 10); + const patch = Number.parseInt(match[3] ?? '', 10); + + if (Number.isNaN(major) || Number.isNaN(minor) || Number.isNaN(patch)) { + return null; + } + + return [major, minor, patch]; +} + +function compareSemver(a: string, b: string): number { + const parsedA = parseSemver(a); + const parsedB = parseSemver(b); + + if (!parsedA || !parsedB) { + return a.localeCompare(b, undefined, { numeric: true }); + } + + const [majorA, minorA, patchA] = parsedA; + const [majorB, minorB, patchB] = parsedB; + + if (majorA !== majorB) { + return majorA - majorB; + } + + if (minorA !== minorB) { + return minorA - minorB; + } + + if (patchA !== patchB) { + return patchA - patchB; + } + + return 0; +} + +interface RuleVersionCheck { + installedVersion?: string; + registryVersion?: string; +} + export async function downloadAndInstallAsset( cwd: string, type: AssetType, @@ -265,7 +387,10 @@ export async function downloadAndInstallAsset( let content: string; try { - content = await fetchContent(getAssetContentPath(type, name)); + content = await spinnerTask({ + label: `Fetching ${source}`, + task: async () => fetchContent(getAssetContentPath(type, name)), + }); } catch (err) { const msg = err instanceof Error ? 
err.message : String(err); ui.error(msg); @@ -290,9 +415,9 @@ export async function downloadAndInstallAsset( if (!options.force) { ui.info(`${source} already exists locally`); try { - const shouldOverwrite = await confirm({ + const shouldOverwrite = await confirmPrompt({ message: 'Overwrite?', - default: true, + defaultValue: true, }); if (!shouldOverwrite) { ui.error('Cancelled'); @@ -309,7 +434,7 @@ export async function downloadAndInstallAsset( ui.newline(); ui.header('Dry run — would write:'); ui.newline(); - console.log(chalk.dim(` .dwf/assets/${type}s/${fileName}`)); + console.log(pc.dim(` .dwf/assets/${type}s/${fileName}`)); return false; } @@ -333,6 +458,7 @@ async function downloadAndInstall( category: string, name: string, options: AddOptions, + versionCheck?: RuleVersionCheck, ): Promise { const source = `${category}/${name}`; const fileName = `pulled-${category}-${name}.yml`; @@ -342,7 +468,10 @@ async function downloadAndInstall( let markdown: string; try { - markdown = await fetchRawContent(source); + markdown = await spinnerTask({ + label: `Fetching ${source}`, + task: async () => fetchRawContent(source), + }); } catch (err) { const msg = err instanceof Error ? 
err.message : String(err); ui.error(msg); @@ -361,6 +490,36 @@ async function downloadAndInstall( } if (await fileExists(filePath)) { + const installedVersion = versionCheck?.installedVersion; + const registryVersion = versionCheck?.registryVersion; + + if (installedVersion && registryVersion) { + const comparison = compareSemver(registryVersion, installedVersion); + + if (comparison === 0) { + ui.success(`Already up to date (${source} v${registryVersion})`); + return false; + } + + if (comparison > 0 && !options.force) { + ui.newline(); + ui.info(`${source} update available (v${installedVersion} ${ICONS.arrow} v${registryVersion})`); + try { + const shouldUpdate = await confirmPrompt({ + message: 'Install update?', + defaultValue: true, + }); + if (!shouldUpdate) { + ui.error('Cancelled'); + return false; + } + } catch { + ui.error('Cancelled'); + return false; + } + } + } + try { const existingRaw = await readFile(filePath, 'utf-8'); const existingDoc = parse(existingRaw) as Record; @@ -376,10 +535,10 @@ async function downloadAndInstall( ui.newline(); ui.info(`${source} already exists locally (v${existingVersion} ${ICONS.arrow} v${result.version})`); try { - const shouldOverwrite = await confirm({ - message: 'Overwrite with new version?', - default: true, - }); + const shouldOverwrite = await confirmPrompt({ + message: 'Overwrite with new version?', + defaultValue: true, + }); if (!shouldOverwrite) { ui.error('Cancelled'); return false; @@ -401,7 +560,7 @@ async function downloadAndInstall( ui.newline(); ui.header('Dry run — would write:'); ui.newline(); - console.log(chalk.dim(` ${fileName}`)); + console.log(pc.dim(` ${fileName}`)); ui.newline(); console.log(yamlOutput); return false; @@ -422,15 +581,16 @@ async function downloadAndInstall( } async function runInteractiveAsset(cwd: string, options: AddOptions): Promise { + introPrompt('Add assets'); let assetType: AssetType | 'preset'; try { - assetType = await select({ + assetType = await selectPrompt({ 
message: 'Asset type', - choices: [ - { name: 'command — Slash commands for Claude Code', value: 'command' }, - { name: 'template — Spec and document templates', value: 'template' }, - { name: 'hook — Editor hooks (auto-format, etc.)', value: 'hook' }, - { name: 'preset — Bundle of rules + assets', value: 'preset' }, + options: [ + { label: 'command — Slash commands for Claude Code', value: 'command' }, + { label: 'template — Spec and document templates', value: 'template' }, + { label: 'hook — Editor hooks (auto-format, etc.)', value: 'hook' }, + { label: 'preset — Bundle of rules + assets', value: 'preset' }, ], }); } catch { @@ -458,9 +618,9 @@ async function runInteractiveAsset(cwd: string, options: AddOptions): Promise({ + selected = await multiselectPrompt({ message: `Select ${assetType}s to install`, - choices: names.map((name) => ({ name, value: name })), + options: names.map((name) => ({ label: name, value: name })), }); } catch { ui.error('Cancelled'); @@ -487,16 +647,19 @@ async function runInteractiveAsset(cwd: string, options: AddOptions): Promise { + introPrompt('Add rules or assets'); let mode: 'rules' | 'assets'; try { - mode = await select<'rules' | 'assets'>({ + mode = await selectPrompt<'rules' | 'assets'>({ message: 'What do you want to add?', - choices: [ - { name: 'Rules — Install rules from the registry', value: 'rules' }, - { name: 'Assets — Commands, templates, hooks, presets', value: 'assets' }, + options: [ + { label: 'Rules — Install rules from the registry', value: 'rules' }, + { label: 'Assets — Commands, templates, hooks, presets', value: 'assets' }, ], }); } catch { @@ -538,15 +701,15 @@ async function runInteractive(cwd: string, options: AddOptions): Promise { ); if (availableCategories.length === 0) break; - const selectedCategoryName = await select({ + const selectedCategoryName = await selectPrompt({ message: 'Choose a category', - choices: availableCategories.map((c) => { + options: availableCategories.map((c) => { const 
allInstalled = c.rules.every((r) => installedPaths.has(`${c.name}/${r.name}`), ); const label = `${c.name} (${pluralRules(c.rules.length)})`; return { - name: allInstalled ? `${label} ${chalk.dim('(all installed)')}` : label, + label: allInstalled ? `${label} ${pc.dim('(all installed)')}` : label, value: c.name, }; }), @@ -555,17 +718,17 @@ async function runInteractive(cwd: string, options: AddOptions): Promise { const category = registry.categories.find((c) => c.name === selectedCategoryName); if (!category) break; - const selected = await checkbox({ + const selected = await multiselectPrompt({ message: 'Select rules to add', - choices: [ - { name: '\u2190 Back to categories', value: BACK_VALUE }, + options: [ + { label: '\u2190 Back to categories', value: BACK_VALUE }, ...category.rules.map((r) => { const path = `${category.name}/${r.name}`; const installed = installedPaths.has(path); const desc = r.description ? ` ${ICONS.dash} ${r.description}` : ''; - const suffix = installed ? chalk.dim(' (already installed)') : ''; + const suffix = installed ? pc.dim(' (already installed)') : ''; return { - name: `${r.name}${desc}${suffix}`, + label: `${r.name}${desc}${suffix}`, value: r.name, }; }), @@ -595,9 +758,9 @@ async function runInteractive(cwd: string, options: AddOptions): Promise { ); if (remaining.length === 0) break; - const addMore = await confirm({ + const addMore = await confirmPrompt({ message: 'Add rules from another category?', - default: true, + defaultValue: true, }); if (!addMore) break; } @@ -611,15 +774,15 @@ async function runInteractive(cwd: string, options: AddOptions): Promise { ui.newline(); ui.header('Rules to install:'); for (const rule of allSelected) { - const desc = rule.description ? chalk.dim(` ${ICONS.dash} ${rule.description}`) : ''; + const desc = rule.description ? 
pc.dim(` ${ICONS.dash} ${rule.description}`) : ''; console.log(` ${rule.category}/${rule.name}${desc}`); } ui.newline(); try { - const shouldProceed = await confirm({ + const shouldProceed = await confirmPrompt({ message: `Install ${pluralRules(allSelected.length)}?`, - default: true, + defaultValue: true, }); if (!shouldProceed) { ui.error('Cancelled'); @@ -640,6 +803,8 @@ async function runInteractive(cwd: string, options: AddOptions): Promise { const { runCompileFromAdd } = await import('./compile.js'); await runCompileFromAdd(); } + + outroPrompt('Add flow completed'); } interface PresetManifest { @@ -717,9 +882,36 @@ export async function installPreset( return anyAdded; } +async function resolveRuleVersionCheck(cwd: string, source: string): Promise { + let installedVersion: string | undefined; + try { + const config = await readConfig(cwd); + installedVersion = config.pulled.find((entry) => entry.path === source)?.version; + } catch { + installedVersion = undefined; + } + + let registryVersion: string | undefined; + try { + const registry = await fetchRegistryManifest(cwd); + registryVersion = registry.rules.find((rule) => rule.path === source)?.version; + } catch { + registryVersion = undefined; + } + + if (!installedVersion && !registryVersion) { + return undefined; + } + + return { + installedVersion, + registryVersion, + }; +} + export async function runAdd(ruleArg: string | undefined, options: AddOptions): Promise { if (options.list) { - await runList(ruleArg); + await runList(ruleArg, options.search, options.tag); return; } @@ -732,7 +924,7 @@ export async function runAdd(ruleArg: string | undefined, options: AddOptions): } if (!ruleArg) { - if (!process.stdout.isTTY || !process.stdin.isTTY) { + if (!isInteractiveSession()) { ui.error('No rule specified', 'Usage: devw add /'); process.exitCode = 1; return; @@ -742,6 +934,10 @@ export async function runAdd(ruleArg: string | undefined, options: AddOptions): return; } + if (isInteractiveSession()) { + 
introPrompt('Adding item'); + } + if (!ruleArg.includes('/')) { const dashIdx = ruleArg.indexOf('-'); const hint = @@ -786,12 +982,16 @@ export async function runAdd(ruleArg: string | undefined, options: AddOptions): return; } - const added = await downloadAndInstall(cwd, category, name, options); + const source = `${category}/${name}`; + const versionCheck = await resolveRuleVersionCheck(cwd, source); + const added = await downloadAndInstall(cwd, category, name, options, versionCheck); if (added && !options.noCompile) { const { runCompileFromAdd } = await import('./compile.js'); await runCompileFromAdd(); } + + outroPrompt('Add command completed'); } export function registerAddCommand(program: Command): void { @@ -800,6 +1000,8 @@ export function registerAddCommand(program: Command): void { .argument('[rule]', 'Rule path: /') .description('Add rules from the dev-workflows registry') .option('--list', 'List available rules') + .option('--search ', 'Filter listed rules by search terms') + .option('--tag ', 'Filter listed rules by tag') .option('--no-compile', 'Skip auto-compile after adding') .option('--force', 'Overwrite without asking') .option('--dry-run', 'Show output without writing files') diff --git a/packages/cli/src/commands/compile.ts b/packages/cli/src/commands/compile.ts index 8b9343b..f206c42 100644 --- a/packages/cli/src/commands/compile.ts +++ b/packages/cli/src/commands/compile.ts @@ -1,20 +1,27 @@ import { mkdir, writeFile, readFile, symlink, unlink } from 'node:fs/promises'; -import { join, dirname } from 'node:path'; +import { join, dirname, basename } from 'node:path'; +import { homedir } from 'node:os'; import type { Command } from 'commander'; -import chalk from 'chalk'; -import { readConfig, readRules } from '../core/parser.js'; +import pc from 'picocolors'; +import { readConfig, readConfigFromDwfDir, readRules } from '../core/parser.js'; +import { mergeRules } from '../core/merge.js'; import { computeRulesHash, writeHash } from 
'../core/hash.js'; import { deployAssets } from '../core/assets.js'; -import type { Bridge } from '../bridges/types.js'; +import type { Bridge, DirectoryBridge, Rule } from '../bridges/types.js'; +import { isDirectoryBridge, getBridgeOutputPaths } from '../bridges/types.js'; import { claudeBridge } from '../bridges/claude.js'; import { cursorBridge } from '../bridges/cursor.js'; import { geminiBridge } from '../bridges/gemini.js'; import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; import { mergeMarkedContent, removeMarkedBlock } from '../core/markers.js'; +import { cleanStaleFiles } from '../core/scope-filename.js'; +import { detectLegacyFiles, migrateLegacyFiles } from '../core/cleanup.js'; +import { buildCanonicalOutputs, writeCanonical } from '../core/canonical.js'; import { fileExists } from '../utils/fs.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; +import { renderTable } from '../utils/table.js'; export interface CompileOptions { tool?: string; @@ -30,11 +37,27 @@ export interface BridgeResult { content?: string; } +export interface StaleFileResult { + bridgeId: string; + deleted: string[]; +} + +export interface MigrationResult { + actions: string[]; +} + export interface CompileResult { results: BridgeResult[]; activeRuleCount: number; + globalRuleCount: number; + projectRuleCount: number; + overriddenRuleIds: string[]; + canonicalFileCount: number; + canonicalError?: string; assetPaths: string[]; elapsedMs: number; + staleResults: StaleFileResult[]; + migration: MigrationResult; } export interface PipelineOptions { @@ -49,12 +72,92 @@ function getBridge(id: string): Bridge | undefined { return BRIDGES.find((b) => b.id === id); } +function extractFilenameFromPath(relativePath: string): string { + const parts = relativePath.split('/'); + return parts[parts.length - 1] ?? 
relativePath; +} + +async function handleDirectoryBridgeCleanup( + outputRoot: string, + bridge: DirectoryBridge, + writtenFilenames: Set, + write: boolean, +): Promise { + if (!write) { + return []; + } + + const outputDir = join(outputRoot, bridge.outputDir); + return cleanStaleFiles(outputDir, bridge.filePrefix, bridge.fileExtension, writtenFilenames); +} + +interface CompileContext { + configRoot: string; + outputRoot: string; + globalMode: boolean; +} + +function toCompileSummaryRows(result: CompileResult): string[][] { + const counts = new Map(); + + for (const output of result.results) { + const current = counts.get(output.bridgeId) ?? { success: 0, failed: 0 }; + if (output.success) { + current.success += 1; + } else { + current.failed += 1; + } + counts.set(output.bridgeId, current); + } + + const rows: string[][] = []; + for (const [bridgeId, count] of counts.entries()) { + rows.push([ + bridgeId, + String(count.success), + String(count.failed), + ]); + } + + rows.sort((a, b) => a[0]!.localeCompare(b[0]!)); + return rows; +} + +async function resolveCompileContext(cwd: string): Promise { + const projectConfigPath = join(cwd, '.dwf', 'config.yml'); + if (await fileExists(projectConfigPath)) { + return { + configRoot: cwd, + outputRoot: cwd, + globalMode: false, + }; + } + + const inGlobalConfigDir = basename(cwd) === '.dwf'; + const globalConfigPath = join(cwd, 'config.yml'); + if (inGlobalConfigDir && await fileExists(globalConfigPath)) { + return { + configRoot: cwd, + outputRoot: homedir(), + globalMode: true, + }; + } + + throw new Error('.dwf/config.yml not found. Run devw init to initialize the project'); +} + export async function executePipeline(options: PipelineOptions): Promise { const { cwd, tool, write = true } = options; const startTime = performance.now(); + const context = await resolveCompileContext(cwd); - const config = await readConfig(cwd); - const rules = await readRules(cwd); + const config = context.globalMode ? 
await readConfigFromDwfDir(context.configRoot) : await readConfig(context.configRoot); + const projectRules = await readRules(context.configRoot); + const globalRules = context.globalMode || config.global === false + ? [] + : await readRules(context.configRoot, join(homedir(), '.dwf', 'rules')); + const rules = mergeRules(globalRules, projectRules); + const overriddenRuleIds = getOverriddenRuleIds(globalRules, projectRules); let toolIds = config.tools; if (tool) { @@ -64,8 +167,19 @@ export async function executePipeline(options: PipelineOptions): Promise 0) { + const actions = await migrateLegacyFiles(context.outputRoot, legacyFiles); + migration.actions = actions; + } + } + const activeRules = rules.filter((r) => r.enabled); const results: BridgeResult[] = []; + const staleResults: StaleFileResult[] = []; for (const toolId of toolIds) { const bridge = getBridge(toolId); @@ -74,12 +188,61 @@ export async function executePipeline(options: PipelineOptions): Promise 0) { + staleResults.push({ bridgeId: bridge.id, deleted }); + } + continue; + } + + const outputs = bridge.compile(rules, config); + const writtenFilenames = new Set(); + + for (const [relativePath, content] of outputs) { + writtenFilenames.add(extractFilenameFromPath(relativePath)); + + if (!write) { + results.push({ bridgeId: bridge.id, outputPath: relativePath, success: true, content }); + continue; + } + + const absolutePath = join(context.outputRoot, relativePath); + await mkdir(dirname(absolutePath), { recursive: true }); + + if (config.mode === 'link') { + const cachePath = join(context.outputRoot, '.dwf', '.cache', relativePath); + await mkdir(dirname(cachePath), { recursive: true }); + await writeFile(cachePath, content, 'utf-8'); + + if (await fileExists(absolutePath)) { + await unlink(absolutePath); + } + await symlink(cachePath, absolutePath); + } else { + await writeFile(absolutePath, content, 'utf-8'); + } + + results.push({ bridgeId: bridge.id, outputPath: relativePath, success: true }); + 
} + + // Stale file cleanup for DirectoryBridge + const deleted = await handleDirectoryBridgeCleanup(context.outputRoot, bridge, writtenFilenames, write); + if (deleted.length > 0) { + staleResults.push({ bridgeId: bridge.id, deleted }); + } + } else { + // MarkerBridge flow: merge content between markers in target file + if (activeRules.length === 0 && write) { + for (const relativePath of getBridgeOutputPaths(bridge)) { + const absolutePath = join(context.outputRoot, relativePath); + if (!(await fileExists(absolutePath))) { + continue; + } - if (bridge.usesMarkers) { const existing = await readFile(absolutePath, 'utf-8'); const cleaned = removeMarkedBlock(existing); if (cleaned.length === 0) { @@ -87,20 +250,16 @@ export async function executePipeline(options: PipelineOptions): Promise 0) { + for (const relativePath of errorPaths) { + results.push({ bridgeId: bridge.id, outputPath: relativePath, success: false, error: message }); + } + } else { + results.push({ bridgeId: bridge.id, outputPath: bridge.id, success: false, error: message }); } } } + // Canonical output intentionally always runs, even when --tool filters bridges. + // This keeps `.agents/rules/devw` as the source-of-truth for doctor checks and distribution. + const canonicalOutputs = buildCanonicalOutputs(rules); + let canonicalPaths: string[] = []; + let canonicalError: string | undefined; + if (write) { + try { + canonicalPaths = await writeCanonical(context.outputRoot, canonicalOutputs); + for (const relativePath of canonicalPaths) { + results.push({ bridgeId: 'canonical', outputPath: relativePath, success: true }); + } + } catch (err) { + canonicalError = err instanceof Error ? 
err.message : String(err); + const errorPaths = [...canonicalOutputs.keys()]; + if (errorPaths.length > 0) { + for (const relativePath of errorPaths) { + results.push({ bridgeId: 'canonical', outputPath: relativePath, success: false, error: canonicalError }); + } + } else { + results.push({ bridgeId: 'canonical', outputPath: '.agents/rules/devw', success: false, error: canonicalError }); + } + } + } else { + for (const [relativePath, content] of canonicalOutputs) { + canonicalPaths.push(relativePath); + results.push({ bridgeId: 'canonical', outputPath: relativePath, success: true, content }); + } + } + let assetPaths: string[] = []; if (write) { const hash = computeRulesHash(activeRules); - await writeHash(cwd, hash); + await writeHash(context.outputRoot, hash); - const assetResult = await deployAssets(cwd, config); + const assetResult = await deployAssets(context.outputRoot, config); assetPaths = assetResult.deployed; } const elapsedMs = performance.now() - startTime; - return { results, activeRuleCount: activeRules.length, assetPaths, elapsedMs }; + return { + results, + activeRuleCount: activeRules.length, + globalRuleCount: globalRules.length, + projectRuleCount: projectRules.length, + overriddenRuleIds, + canonicalFileCount: canonicalPaths.length, + canonicalError, + assetPaths, + elapsedMs, + staleResults, + migration, + }; } export async function runCompile(options: CompileOptions): Promise { const cwd = process.cwd(); - if (!(await fileExists(join(cwd, '.dwf', 'config.yml')))) { - ui.error('.dwf/config.yml not found', 'Run devw init to initialize the project'); - process.exitCode = 1; - return; - } - try { + const context = await resolveCompileContext(cwd); + if (options.verbose) { - const config = await readConfig(cwd); - const rules = await readRules(cwd); - ui.keyValue('Project:', chalk.bold(config.project.name)); + const config = context.globalMode ? 
await readConfigFromDwfDir(context.configRoot) : await readConfig(context.configRoot); + const projectRules = await readRules(context.configRoot); + const globalRules = context.globalMode || config.global === false + ? [] + : await readRules(context.configRoot, join(homedir(), '.dwf', 'rules')); + const mergedRules = mergeRules(globalRules, projectRules); + const overriddenRuleIds = getOverriddenRuleIds(globalRules, projectRules); + + ui.keyValue('Project:', pc.bold(config.project.name)); + ui.keyValue('Scope:', context.globalMode ? 'global (~/.dwf)' : 'project (.dwf)'); ui.keyValue('Mode:', config.mode); - ui.keyValue('Rules:', String(rules.length)); + ui.keyValue('Project rules:', String(projectRules.length)); + if (config.global === false) { + ui.keyValue('Global rules:', 'disabled by config'); + } else { + ui.keyValue('Global rules:', String(globalRules.length)); + } + ui.keyValue('Merged rules:', String(mergedRules.length)); + if (overriddenRuleIds.length > 0) { + ui.keyValue('Project overrides:', String(overriddenRuleIds.length)); + } const toolIds = options.tool ? [options.tool] : config.tools; - ui.keyValue('Tools:', chalk.cyan(toolIds.join(', '))); + ui.keyValue('Tools:', pc.cyan(toolIds.join(', '))); ui.newline(); } if (options.dryRun) { const result = await executePipeline({ cwd, tool: options.tool, write: false }); + + ui.newline(); + ui.info('Dry run — no files written'); + ui.newline(); + for (const br of result.results) { if (br.content !== undefined) { - console.log(chalk.cyan(`--- ${br.outputPath} ---`)); + console.log(pc.cyan(`--- ${br.outputPath} ---`)); console.log(br.content); } } + + // Summary of what would be generated + const fileCount = result.results.filter((r) => r.success).length; + ui.newline(); + ui.info( + `Would generate ${String(fileCount)} file${fileCount !== 1 ? 
's' : ''} (${String(result.canonicalFileCount)} canonical) from ${String(result.activeRuleCount)} rules`, + ); return; } const result = await executePipeline({ cwd, tool: options.tool }); + + if (options.tool) { + ui.info('Note: canonical output is always refreshed in .agents/rules/devw'); + } + + if (result.canonicalError) { + ui.warn(`Canonical write failed: ${result.canonicalError}`); + ui.warn('Tool-specific outputs were still written'); + } + + const summaryTable = renderTable( + ['bridge', 'generated', 'failed'], + toCompileSummaryRows(result), + [10, 9, 6], + ); + + // Show migration messages if any + if (result.migration.actions.length > 0) { + ui.newline(); + ui.info('Migrating from single-file to multi-file output...'); + for (const action of result.migration.actions) { + ui.info(` ${action}`); + } + } + const writtenPaths = result.results.filter((r) => r.success).map((r) => r.outputPath); const allPaths = [...writtenPaths, ...result.assetPaths]; ui.newline(); ui.success(`Compiled ${String(result.activeRuleCount)} rules ${ICONS.arrow} ${String(allPaths.length)} file${allPaths.length !== 1 ? 
's' : ''} ${ui.timing(result.elapsedMs)}`); + ui.info(`Canonical files: ${String(result.canonicalFileCount)}`); + ui.log(summaryTable); + if (options.verbose && result.overriddenRuleIds.length > 0) { + ui.info(`Project overrides (${String(result.overriddenRuleIds.length)}): ${result.overriddenRuleIds.join(', ')}`); + } ui.newline(); - if (options.verbose && result.assetPaths.length > 0) { + if (options.verbose) { ui.list(writtenPaths); - ui.newline(); - console.log(` ${chalk.dim('Assets deployed:')}`); - ui.list(result.assetPaths); + + if (result.staleResults.length > 0) { + ui.newline(); + console.log(` ${pc.dim('Stale files removed:')}`); + for (const stale of result.staleResults) { + for (const deleted of stale.deleted) { + ui.info(` ${stale.bridgeId}: ${deleted}`); + } + } + } + + if (result.assetPaths.length > 0) { + ui.newline(); + console.log(` ${pc.dim('Assets deployed:')}`); + ui.list(result.assetPaths); + } } else { ui.list(allPaths); } @@ -213,6 +485,22 @@ export async function runCompileFromAdd(): Promise { await runCompile({}); } +function getOverriddenRuleIds(globalRules: Rule[], projectRules: Rule[]): string[] { + const globalIds = new Set(globalRules.map((rule) => rule.id)); + const orderedOverrides: string[] = []; + const seen = new Set(); + + for (const rule of projectRules) { + if (!globalIds.has(rule.id) || seen.has(rule.id)) { + continue; + } + seen.add(rule.id); + orderedOverrides.push(rule.id); + } + + return orderedOverrides; +} + export function registerCompileCommand(program: Command): void { program .command('compile') diff --git a/packages/cli/src/commands/doctor.ts b/packages/cli/src/commands/doctor.ts index b2c0092..9045d63 100644 --- a/packages/cli/src/commands/doctor.ts +++ b/packages/cli/src/commands/doctor.ts @@ -1,5 +1,5 @@ import { lstat, readFile, readdir } from 'node:fs/promises'; -import { join } from 'node:path'; +import { basename, join, relative } from 'node:path'; import type { Command } from 'commander'; import { parse } 
from 'yaml'; import { readConfig, readRules } from '../core/parser.js'; @@ -9,13 +9,23 @@ import { cursorBridge } from '../bridges/cursor.js'; import { geminiBridge } from '../bridges/gemini.js'; import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; -import type { Bridge, ProjectConfig, PulledEntry, AssetEntry, Rule } from '../bridges/types.js'; +import type { Bridge, DirectoryBridge, ProjectConfig, PulledEntry, AssetEntry, Rule } from '../bridges/types.js'; +import { getBridgeOutputPaths, isDirectoryBridge } from '../bridges/types.js'; import { fileExists } from '../utils/fs.js'; import { isValidScope } from '../core/schema.js'; +import { buildCanonicalOutputs } from '../core/canonical.js'; +import { detectLegacyFiles } from '../core/cleanup.js'; import * as ui from '../utils/ui.js'; const BRIDGES: Bridge[] = [claudeBridge, cursorBridge, geminiBridge, windsurfBridge, copilotBridge]; const BRIDGE_IDS = new Set(BRIDGES.map((b) => b.id)); +const DIRECTORY_BRIDGE_IDS = new Set(BRIDGES.filter(isDirectoryBridge).map((bridge) => bridge.id)); + +function getConfiguredDirectoryBridges(config: ProjectConfig): DirectoryBridge[] { + return BRIDGES.filter((bridge): bridge is DirectoryBridge => { + return isDirectoryBridge(bridge) && DIRECTORY_BRIDGE_IDS.has(bridge.id) && config.tools.includes(bridge.id); + }); +} export interface CheckResult { passed: boolean; @@ -158,7 +168,7 @@ export async function checkSymlinks(cwd: string, config: ProjectConfig): Promise for (const bridge of BRIDGES) { if (!config.tools.includes(bridge.id)) continue; - for (const outputPath of bridge.outputPaths) { + for (const outputPath of getBridgeOutputPaths(bridge)) { const absolutePath = join(cwd, outputPath); try { const stat = await lstat(absolutePath); @@ -257,6 +267,212 @@ export async function checkHashSync(cwd: string, rules: Rule[]): Promise { + const canonicalDir = join(cwd, '.agents', 'rules', 'devw'); + + let entries: string[]; + 
try { + entries = await readdir(canonicalDir); + } catch { + return { + passed: false, + message: '.agents/rules/devw not found — run "devw compile"', + }; + } + + const canonicalFiles = entries.filter((entry) => entry.startsWith('dwf-') && entry.endsWith('.md')); + if (canonicalFiles.length === 0) { + return { + passed: false, + message: '.agents/rules/devw has no canonical files — run "devw compile"', + }; + } + + return { + passed: true, + message: `Canonical files exist (${String(canonicalFiles.length)} file${canonicalFiles.length === 1 ? '' : 's'})`, + }; +} + +export async function checkCanonicalSync(cwd: string, rules: Rule[], config: ProjectConfig): Promise { + const directoryBridges = getConfiguredDirectoryBridges(config); + + if (directoryBridges.length === 0) { + return { + passed: true, + message: 'Canonical sync skipped (no directory tools configured)', + skipped: true, + }; + } + + const canonicalOutputs = buildCanonicalOutputs(rules); + if (canonicalOutputs.size === 0) { + return { + passed: true, + message: 'Canonical sync skipped (no active scope outputs)', + skipped: true, + }; + } + + const mismatches: string[] = []; + let compared = 0; + + for (const bridge of directoryBridges) { + const expectedNativeFiles = new Set(); + + for (const [canonicalPath, canonicalContent] of canonicalOutputs) { + const canonicalFilename = basename(canonicalPath); + const scopeName = canonicalFilename.slice('dwf-'.length, canonicalFilename.length - '.md'.length); + const nativeFilename = `${bridge.filePrefix}${scopeName}${bridge.fileExtension}`; + expectedNativeFiles.add(nativeFilename); + + const nativePath = join(cwd, bridge.outputDir, nativeFilename); + if (!(await fileExists(nativePath))) { + mismatches.push(`${bridge.id}: missing ${nativeFilename}`); + continue; + } + + const nativeRaw = await readFile(nativePath, 'utf-8'); + const normalizedNative = normalizeComparableContent(nativeRaw); + const normalizedCanonical = 
normalizeComparableContent(canonicalContent); + + compared += 1; + if (normalizedNative !== normalizedCanonical) { + mismatches.push(`${bridge.id}: modified ${nativeFilename}`); + } + } + + const bridgeDir = join(cwd, bridge.outputDir); + let entries: string[] = []; + try { + entries = await readdir(bridgeDir); + } catch { + entries = []; + } + + for (const entry of entries) { + if (!entry.startsWith(bridge.filePrefix) || !entry.endsWith(bridge.fileExtension)) { + continue; + } + if (!expectedNativeFiles.has(entry)) { + mismatches.push(`${bridge.id}: unexpected ${entry}`); + } + } + } + + if (mismatches.length > 0) { + return { + passed: false, + message: `Canonical/native mismatch: ${mismatches.join(', ')}`, + }; + } + + return { + passed: true, + message: `Canonical and native files are in sync (${String(compared)} files compared)`, + }; +} + +export async function checkLegacyMigration(cwd: string): Promise { + const legacyFiles = await detectLegacyFiles(cwd); + if (legacyFiles.length === 0) { + return { passed: true, message: 'No legacy v0.5/v0.6 files pending migration' }; + } + + const pending = legacyFiles.map((legacy) => relative(cwd, legacy.path)); + return { + passed: false, + message: `Legacy files still present: ${pending.join(', ')}`, + }; +} + +export async function checkNativeFrontmatter(cwd: string, config: ProjectConfig): Promise { + const directoryBridges = getConfiguredDirectoryBridges(config); + + if (directoryBridges.length === 0) { + return { + passed: true, + message: 'Frontmatter check skipped (no directory tools configured)', + skipped: true, + }; + } + + const errors: string[] = []; + let checked = 0; + + for (const bridge of directoryBridges) { + const dirPath = join(cwd, bridge.outputDir); + let entries: string[] = []; + try { + entries = await readdir(dirPath); + } catch { + entries = []; + } + + for (const entry of entries) { + if (!entry.startsWith(bridge.filePrefix) || !entry.endsWith(bridge.fileExtension)) { + continue; + } + + 
checked += 1; + const filePath = join(dirPath, entry); + const content = await readFile(filePath, 'utf-8'); + const frontmatter = extractFrontmatter(content); + const requiresFrontmatter = bridge.id === 'cursor' || bridge.id === 'windsurf'; + + if (frontmatter === null) { + if (requiresFrontmatter) { + errors.push(`${bridge.id}: missing frontmatter in ${entry}`); + } + continue; + } + + try { + const parsed = parse(frontmatter); + if (parsed === null || typeof parsed !== 'object' || Array.isArray(parsed)) { + errors.push(`${bridge.id}: invalid frontmatter object in ${entry}`); + } + } catch { + errors.push(`${bridge.id}: invalid YAML frontmatter in ${entry}`); + } + } + } + + if (errors.length > 0) { + return { + passed: false, + message: `Invalid native frontmatter: ${errors.join(', ')}`, + }; + } + + if (checked === 0) { + return { + passed: true, + message: 'Frontmatter check skipped (no native files found)', + skipped: true, + }; + } + + return { + passed: true, + message: `Native frontmatter is valid (${String(checked)} files checked)`, + }; +} + export async function runDoctor(): Promise { const cwd = process.cwd(); const startTime = performance.now(); @@ -281,11 +497,6 @@ export async function runDoctor(): Promise { const configValidResult = await checkConfigValid(cwd); results.push(configValidResult); - let config: ProjectConfig | null = null; - if (configValidResult.passed) { - config = await readConfig(cwd); - } - // Check 3: Rule files are valid YAML const rulesValidResult = await checkRulesValid(cwd); results.push(rulesValidResult); @@ -300,6 +511,8 @@ export async function runDoctor(): Promise { return; } + const config = await readConfig(cwd); + // Load rules for remaining checks let rules: Rule[] = []; try { @@ -317,26 +530,43 @@ export async function runDoctor(): Promise { results.push(scopeResult); // Check 6: Tools have bridges - // config is guaranteed non-null here since configValidResult.passed - const bridgeResult = 
checkBridgesAvailable(config!); + const bridgeResult = checkBridgesAvailable(config); results.push(bridgeResult); // Check 7: Symlinks valid (conditional on mode) - const symlinkResult = await checkSymlinks(cwd, config!); + const symlinkResult = await checkSymlinks(cwd, config); results.push(symlinkResult); // Check 8: Pulled files exist - const pulledResult = await checkPulledFilesExist(cwd, config!.pulled); + const pulledResult = await checkPulledFilesExist(cwd, config.pulled); results.push(pulledResult); // Check 9: Asset files exist - const assetResult = await checkAssetFilesExist(cwd, config!.assets); + const assetResult = await checkAssetFilesExist(cwd, config.assets); results.push(assetResult); // Check 10: Hash sync (conditional on compiled files existing) const hashResult = await checkHashSync(cwd, rules); results.push(hashResult); + // Check 11: Canonical output exists (skip if no rules) + if (rules.length > 0) { + const canonicalExistsResult = await checkCanonicalExists(cwd); + results.push(canonicalExistsResult); + + // Check 12: Canonical and native outputs are synchronized + const canonicalSyncResult = await checkCanonicalSync(cwd, rules, config); + results.push(canonicalSyncResult); + } + + // Check 13: Legacy migration has no pending files + const legacyResult = await checkLegacyMigration(cwd); + results.push(legacyResult); + + // Check 14: Native files have valid frontmatter for their editor + const frontmatterResult = await checkNativeFrontmatter(cwd, config); + results.push(frontmatterResult); + // Output for (const r of results) { ui.check(r.passed, r.message, r.skipped); diff --git a/packages/cli/src/commands/explain.ts b/packages/cli/src/commands/explain.ts index 4569475..c9f102c 100644 --- a/packages/cli/src/commands/explain.ts +++ b/packages/cli/src/commands/explain.ts @@ -1,8 +1,9 @@ import { join } from 'node:path'; import type { Command } from 'commander'; -import chalk from 'chalk'; +import pc from 'picocolors'; import { readConfig, 
readRules } from '../core/parser.js'; import type { Bridge, Rule } from '../bridges/types.js'; +import { isMarkerBridge, isDirectoryBridge, getBridgeOutputPaths } from '../bridges/types.js'; import { claudeBridge } from '../bridges/claude.js'; import { cursorBridge } from '../bridges/cursor.js'; import { geminiBridge } from '../bridges/gemini.js'; @@ -26,10 +27,10 @@ function getBridge(id: string): Bridge | undefined { } function getModeLabel(bridge: Bridge): string { - if (bridge.usesMarkers) { + if (isMarkerBridge(bridge)) { return 'markers (BEGIN/END)'; } - return 'full file'; + return 'multi-file (one per scope)'; } function getExcludedRules(rules: Rule[]): Array<{ id: string; reason: string }> { @@ -52,7 +53,7 @@ function formatSeparator(toolId: string): string { const prefix = `${ICONS.separator}${ICONS.separator}`; const remaining = lineWidth - prefix.length - label.length; const suffix = ICONS.separator.repeat(Math.max(0, remaining)); - return chalk.dim(`${prefix}${label}${suffix}`); + return pc.dim(`${prefix}${label}${suffix}`); } async function runExplain(options: ExplainOptions): Promise { @@ -79,21 +80,50 @@ async function runExplain(options: ExplainOptions): Promise { for (const toolId of toolIds) { const bridge = getBridge(toolId); - if (!bridge) continue; - - const outputPath = bridge.outputPaths[0] ?? 
toolId; + if (!bridge) { + continue; + } console.log(` ${formatSeparator(toolId)}`); ui.newline(); - ui.keyValue('Output:', outputPath); - ui.keyValue('Mode:', getModeLabel(bridge)); - const included = filterRules(rules); - const grouped = groupByScope(included); + if (isDirectoryBridge(bridge)) { + // DirectoryBridge: show output directory and file listing + const outputPattern = `${bridge.outputDir}/${bridge.filePrefix}*${bridge.fileExtension}`; + ui.keyValue('Output:', outputPattern); + ui.keyValue('Mode:', getModeLabel(bridge)); + + const included = filterRules(rules); + const grouped = groupByScope(included); - ui.keyValue('Rules:', `${String(included.length)} included`); - for (const [scope, scopeRules] of grouped) { - console.log(` ${' '.repeat(10)}${scope}: ${String(scopeRules.length)}`); + ui.keyValue('Rules:', `${String(included.length)} included`); + + // Show files that would be generated (one per scope) + ui.newline(); + ui.keyValue('Files:', `${String(grouped.size)} scope${grouped.size !== 1 ? 's' : ''}`); + const outputs = bridge.compile(rules, config); + for (const [filePath] of outputs) { + console.log(` ${' '.repeat(10)}${filePath}`); + } + + // Show scope breakdown + for (const [scope, scopeRules] of grouped) { + console.log(` ${' '.repeat(10)} ${scope}: ${String(scopeRules.length)} rule${scopeRules.length !== 1 ? 's' : ''}`); + } + } else { + // MarkerBridge: show single output file + const bridgePaths = getBridgeOutputPaths(bridge); + const outputPath = bridgePaths[0] ?? 
toolId; + ui.keyValue('Output:', outputPath); + ui.keyValue('Mode:', getModeLabel(bridge)); + + const included = filterRules(rules); + const grouped = groupByScope(included); + + ui.keyValue('Rules:', `${String(included.length)} included`); + for (const [scope, scopeRules] of grouped) { + console.log(` ${' '.repeat(10)}${scope}: ${String(scopeRules.length)}`); + } } const excluded = getExcludedRules(rules); @@ -108,12 +138,16 @@ async function runExplain(options: ExplainOptions): Promise { if (bridge.id === 'windsurf') { const outputs = bridge.compile(rules, config); - const content = outputs.get('.windsurf/rules/devworkflows.md') ?? ''; - const charCount = content.length; - const formatted = `${String(charCount).replace(/\B(?=(\d{3})+(?!\d))/g, ',')} / ${String(WINDSURF_CHAR_LIMIT).replace(/\B(?=(\d{3})+(?!\d))/g, ',')} chars`; + let maxPerFile = 0; + for (const [, val] of outputs) { + if (val.length > maxPerFile) { + maxPerFile = val.length; + } + } + const formatted = `${String(maxPerFile).replace(/\B(?=(\d{3})+(?!\d))/g, ',')} / ${String(WINDSURF_CHAR_LIMIT).replace(/\B(?=(\d{3})+(?!\d))/g, ',')} chars (per file)`; ui.newline(); - if (charCount > WINDSURF_CHAR_LIMIT) { - ui.warn(`Output size: ${formatted} (Windsurf limit)`); + if (maxPerFile > WINDSURF_CHAR_LIMIT) { + ui.warn(`Max file size: ${formatted} (Windsurf limit)`); } else { ui.keyValue('Size:', `${formatted} (Windsurf limit)`); } diff --git a/packages/cli/src/commands/init.ts b/packages/cli/src/commands/init.ts index c5ad522..54701b7 100644 --- a/packages/cli/src/commands/init.ts +++ b/packages/cli/src/commands/init.ts @@ -1,17 +1,27 @@ import { mkdir, writeFile, readFile, appendFile } from 'node:fs/promises'; import { join, basename } from 'node:path'; +import { homedir } from 'node:os'; import type { Command } from 'commander'; import { stringify } from 'yaml'; -import chalk from 'chalk'; -import { checkbox, select } from '@inquirer/prompts'; +import pc from 'picocolors'; import { detectTools, 
SUPPORTED_TOOLS } from '../utils/detect-tools.js'; import * as ui from '../utils/ui.js'; import type { ToolId } from '../utils/detect-tools.js'; import { fileExists } from '../utils/fs.js'; +import { + selectPrompt, + multiselectPrompt, + introPrompt, + notePrompt, + outroPrompt, + spinnerTask, + isInteractiveSession, +} from '../utils/prompt.js'; export interface InitOptions { tools?: string; mode?: 'copy' | 'link'; + global?: boolean; yes?: boolean; preset?: string; } @@ -54,13 +64,14 @@ async function resolveTools(options: InitOptions, cwd: string): Promise({ + const selected = await multiselectPrompt({ message: 'Which tools to configure?', - choices: SUPPORTED_TOOLS.map((id) => ({ - name: id, + options: SUPPORTED_TOOLS.map((id) => ({ + label: id, value: id, - checked: detectedIds.includes(id), + hint: detectedIds.includes(id) ? 'detected' : undefined, })), + initialValues: detectedIds, }); if (selected.length > 0) { @@ -83,11 +94,11 @@ async function resolveMode(options: InitOptions): Promise<'copy' | 'link'> { return 'copy'; } - const mode = await select<'copy' | 'link'>({ + const mode = await selectPrompt<'copy' | 'link'>({ message: 'Output mode', - choices: [ - { name: 'copy', value: 'copy' as const, description: 'Embed rules directly in tool config files' }, - { name: 'link', value: 'link' as const, description: 'Symlink tool config files to .dwf/ output' }, + options: [ + { label: 'copy', value: 'copy' as const, hint: 'Embed rules directly in tool config files' }, + { label: 'link', value: 'link' as const, hint: 'Symlink tool config files to .dwf/ output' }, ], }); @@ -108,20 +119,39 @@ async function appendToGitignore(cwd: string): Promise { } } -async function runInit(options: InitOptions): Promise { - const cwd = process.cwd(); - const dwfDir = join(cwd, '.dwf'); +type InitScope = 'project' | 'global'; - if (await fileExists(dwfDir)) { - ui.error('.dwf/ already exists in this directory', 'Remove it first or run from a different directory'); - 
process.exitCode = 1; - return; +async function resolveInitScope(options: InitOptions): Promise { + if (options.global) { + return 'global'; } + if (options.yes) { + return 'project'; + } + + return selectPrompt({ + message: 'Where do you want to set up devw?', + options: [ + { label: 'This project (.dwf/)', value: 'project' as const }, + { label: 'Global (~/.dwf/)', value: 'global' as const }, + ], + }); +} + +export async function runInit(options: InitOptions): Promise { + const cwd = process.cwd(); + if (isInteractiveSession() && !options.yes) { + introPrompt('Initialize dev-workflows'); + } + + let scope: InitScope; let tools: ToolId[]; let mode: 'copy' | 'link'; try { - tools = await resolveTools(options, cwd); + scope = await resolveInitScope(options); + const toolDetectRoot = scope === 'global' ? homedir() : cwd; + tools = await resolveTools(options, toolDetectRoot); mode = await resolveMode(options); } catch (err) { if (err instanceof Error && err.name === 'ExitPromptError') return; @@ -129,48 +159,92 @@ async function runInit(options: InitOptions): Promise { process.exitCode = 1; return; } - const projectName = basename(cwd); - // Create .dwf/rules/ and .dwf/assets/ + const rootDir = scope === 'global' ? homedir() : cwd; + const dwfDir = join(rootDir, '.dwf'); + + if (await fileExists(dwfDir)) { + const locationHint = scope === 'global' + ? '~/.dwf/ already exists in your home directory' + : '.dwf/ already exists in this directory'; + ui.error(locationHint, 'Remove it first or run from a different directory'); + process.exitCode = 1; + return; + } + + const projectName = scope === 'global' ? 
'global' : basename(cwd); + const rulesDir = join(dwfDir, 'rules'); - await mkdir(rulesDir, { recursive: true }); - await mkdir(join(dwfDir, 'assets'), { recursive: true }); + await spinnerTask({ + label: 'Creating workspace folders', + task: async () => { + await mkdir(rulesDir, { recursive: true }); + await mkdir(join(dwfDir, 'assets'), { recursive: true }); + }, + }); // Write config.yml const config = { - version: '0.1', + version: '0.2', project: { name: projectName }, tools, mode, + global: true, blocks: [] as string[], }; const configContent = `# Dev Workflows configuration\n${stringify(config)}`; - await writeFile(join(dwfDir, 'config.yml'), configContent, 'utf-8'); + await spinnerTask({ + label: 'Writing config.yml', + task: async () => { + await writeFile(join(dwfDir, 'config.yml'), configContent, 'utf-8'); + }, + }); // Write empty rule files - for (const scope of BUILTIN_SCOPES) { - await writeFile(join(rulesDir, `${scope}.yml`), buildRuleFileContent(scope), 'utf-8'); - } + await spinnerTask({ + label: 'Scaffolding rule files', + task: async () => { + for (const scope of BUILTIN_SCOPES) { + await writeFile(join(rulesDir, `${scope}.yml`), buildRuleFileContent(scope), 'utf-8'); + } + }, + }); - // Append .dwf/.cache/ to .gitignore - await appendToGitignore(cwd); + // Ensure canonical global output dir exists for global mode. + if (scope === 'global') { + await spinnerTask({ + label: 'Preparing canonical global output', + task: async () => { + await mkdir(join(rootDir, '.agents', 'rules', 'devw'), { recursive: true }); + }, + }); + } else { + await appendToGitignore(cwd); + } // Success summary ui.newline(); ui.header('dev-workflows'); ui.newline(); - ui.success('Initialized .dwf/ successfully'); + ui.success(`Initialized ${scope === 'global' ? 
'~/.dwf/' : '.dwf/'} successfully`); ui.newline(); - ui.keyValue('Project:', chalk.bold(projectName)); - ui.keyValue('Tools:', chalk.cyan(tools.join(', '))); + ui.keyValue('Project:', pc.bold(projectName)); + ui.keyValue('Scope:', scope); + ui.keyValue('Tools:', pc.cyan(tools.join(', '))); ui.keyValue('Mode:', mode); ui.newline(); ui.header("What's next"); ui.newline(); - console.log(` 1. Browse available rules ${chalk.cyan('devw add --list')}`); - console.log(` 2. Add a rule ${chalk.cyan('devw add /')}`); - console.log(` 3. Or write your own rules in ${chalk.cyan('.dwf/rules/')}`); - console.log(` 4. When ready, compile ${chalk.cyan('devw compile')}`); + console.log(` 1. Browse available rules ${pc.cyan('devw add --list')}`); + console.log(` 2. Add a rule ${pc.cyan('devw add /')}`); + console.log(` 3. Or write your own rules in ${pc.cyan(scope === 'global' ? '~/.dwf/rules/' : '.dwf/rules/')}`); + console.log(` 4. When ready, compile ${pc.cyan('devw compile')}`); + + notePrompt( + `Project: ${projectName}\nScope: ${scope}\nTools: ${tools.join(', ')}\nMode: ${mode}`, + 'Initialized', + ); + outroPrompt(`Ready: ${scope === 'global' ? 
'~/.dwf/' : '.dwf/'}`); if (options.preset) { ui.newline(); @@ -187,9 +261,10 @@ async function runInit(options: InitOptions): Promise { export function registerInitCommand(program: Command): void { program .command('init') - .description('Initialize .dwf/ in the current project') + .description('Initialize .dwf/ in this project or globally') .option('--tools ', 'Comma-separated list of tools (claude,cursor,gemini)') .option('--mode ', 'Output mode: copy or link') + .option('--global', 'Initialize global config in ~/.dwf/') .option('--preset ', 'Install a preset after initialization (e.g., spec-driven)') .option('-y, --yes', 'Accept all defaults') .action((options: InitOptions) => runInit(options)); diff --git a/packages/cli/src/commands/list.ts b/packages/cli/src/commands/list.ts index c424635..6d083f4 100644 --- a/packages/cli/src/commands/list.ts +++ b/packages/cli/src/commands/list.ts @@ -1,6 +1,6 @@ import { join } from 'node:path'; import type { Command } from 'commander'; -import chalk from 'chalk'; +import pc from 'picocolors'; import { readConfig, readRules } from '../core/parser.js'; import { fileExists } from '../utils/fs.js'; import { claudeBridge } from '../bridges/claude.js'; @@ -9,7 +9,8 @@ import { geminiBridge } from '../bridges/gemini.js'; import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; import type { Bridge } from '../bridges/types.js'; -import { ASSET_TYPE } from '../bridges/types.js'; +import { ASSET_TYPE, isDirectoryBridge, getBridgeOutputPaths } from '../bridges/types.js'; +import { filterRules, groupByScope } from '../core/helpers.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; @@ -45,17 +46,17 @@ async function listRules(): Promise { ui.header(`Active rules (${String(active.length)})`); ui.newline(); for (const rule of active) { - const severityIcon = rule.severity === 'error' ? chalk.red(ICONS.error) : rule.severity === 'warning' ? 
chalk.yellow(ICONS.warn) : chalk.dim(ICONS.dot); - const severityColor = rule.severity === 'error' ? chalk.red : rule.severity === 'warning' ? chalk.yellow : chalk.dim; + const severityIcon = rule.severity === 'error' ? pc.red(ICONS.error) : rule.severity === 'warning' ? pc.yellow(ICONS.warn) : pc.dim(ICONS.dot); + const severityColor = rule.severity === 'error' ? pc.red : rule.severity === 'warning' ? pc.yellow : pc.dim; let source = ''; if (rule.source) { - source = chalk.dim(` (pulled: ${rule.source})`); + source = pc.dim(` (pulled: ${rule.source})`); } else if (rule.sourceBlock) { - source = chalk.dim(` [${rule.sourceBlock}]`); + source = pc.dim(` [${rule.sourceBlock}]`); } else { - source = chalk.dim(` ${ICONS.arrow} manual`); + source = pc.dim(` ${ICONS.arrow} manual`); } - console.log(` ${severityIcon} ${severityColor(rule.severity.padEnd(8))}${chalk.cyan(rule.scope.padEnd(15))}${rule.id}${source}`); + console.log(` ${severityIcon} ${severityColor(rule.severity.padEnd(8))}${pc.cyan(rule.scope.padEnd(15))}${rule.id}${source}`); } } @@ -70,6 +71,13 @@ async function listTools(): Promise { if (!(await ensureConfig(cwd))) return; const config = await readConfig(cwd); + let activeScopeCount = 0; + try { + const rules = await readRules(cwd); + activeScopeCount = groupByScope(filterRules(rules)).size; + } catch { + activeScopeCount = 0; + } if (config.tools.length === 0) { ui.warn('No tools configured'); @@ -80,11 +88,19 @@ async function listTools(): Promise { ui.newline(); for (const tool of config.tools) { const bridge = BRIDGES.find((b) => b.id === tool); - const outputPath = bridge?.outputPaths[0]; - if (outputPath) { - console.log(` ${chalk.dim(ICONS.bullet)} ${chalk.cyan(tool.padEnd(12))}${chalk.dim(ICONS.arrow)} ${chalk.dim(outputPath)}`); + let outputLabel: string | undefined; + if (bridge) { + if (isDirectoryBridge(bridge)) { + outputLabel = `${bridge.outputDir}/${bridge.filePrefix}*${bridge.fileExtension} (${String(activeScopeCount)} 
file${activeScopeCount === 1 ? '' : 's'})`; + } else { + const paths = getBridgeOutputPaths(bridge); + outputLabel = paths[0]; + } + } + if (outputLabel) { + console.log(` ${pc.dim(ICONS.bullet)} ${pc.cyan(tool.padEnd(12))}${pc.dim(ICONS.arrow)} ${pc.dim(outputLabel)}`); } else { - console.log(` ${chalk.dim(ICONS.bullet)} ${chalk.cyan(tool)}`); + console.log(` ${pc.dim(ICONS.bullet)} ${pc.cyan(tool)}`); } } } @@ -124,7 +140,7 @@ async function listAssets(typeFilter?: string): Promise { ui.newline(); for (const asset of filtered) { const outputHint = getAssetOutputHint(asset.type, asset.name); - console.log(` ${chalk.dim(ICONS.bullet)} ${chalk.cyan(asset.type.padEnd(10))} ${chalk.white(asset.name.padEnd(20))} ${chalk.dim(`v${asset.version}`)} ${chalk.dim(ICONS.arrow)} ${chalk.dim(outputHint)}`); + console.log(` ${pc.dim(ICONS.bullet)} ${pc.cyan(asset.type.padEnd(10))} ${pc.white(asset.name.padEnd(20))} ${pc.dim(`v${asset.version}`)} ${pc.dim(ICONS.arrow)} ${pc.dim(outputHint)}`); } } diff --git a/packages/cli/src/commands/menu.ts b/packages/cli/src/commands/menu.ts index 9fa6361..29868e0 100644 --- a/packages/cli/src/commands/menu.ts +++ b/packages/cli/src/commands/menu.ts @@ -1,19 +1,10 @@ -import { select } from '@inquirer/prompts'; -import chalk from 'chalk'; import type { Command } from 'commander'; import { runAdd } from './add.js'; import { runRemove } from './remove.js'; import { runDoctor } from './doctor.js'; import { runCompile } from './compile.js'; - -const menuTheme = { - style: { - keysHelpTip: (keys: [string, string][]): string => - [...keys, ['Ctrl+C', 'back']] - .map(([key, action]) => `${chalk.bold(key)} ${chalk.dim(action)}`) - .join(chalk.dim(' • ')), - }, -} as const; +import { renderBanner } from '../utils/banner.js'; +import { selectPrompt, introPrompt, outroPrompt, isInteractiveSession } from '../utils/prompt.js'; const MENU_CHOICES = { ADD: 'add', @@ -26,57 +17,48 @@ const MENU_CHOICES = { type MenuChoice = (typeof MENU_CHOICES)[keyof typeof 
MENU_CHOICES]; export async function runMainMenu(command: Command): Promise { - if (!process.stdout.isTTY || !process.stdin.isTTY) { + if (!isInteractiveSession()) { command.help(); return; } + const banner = renderBanner(); + if (banner.length > 0) { + console.log(banner); + } + introPrompt('Welcome to dev-workflows'); + while (true) { let choice: MenuChoice; - try { - choice = await select({ - message: 'What do you want to do?', - theme: menuTheme, - choices: [ - { name: 'Add rules or assets', value: MENU_CHOICES.ADD }, - { name: 'Compile for all editors', value: MENU_CHOICES.COMPILE }, - { name: 'Check project status', value: MENU_CHOICES.DOCTOR }, - { name: 'Remove something', value: MENU_CHOICES.REMOVE }, - { name: 'Exit', value: MENU_CHOICES.EXIT }, - ], - }); - } catch (err) { - if (err instanceof Error && err.name === 'ExitPromptError') { - process.exit(0); - } - throw err; - } + choice = await selectPrompt({ + message: 'What do you want to do?', + options: [ + { label: 'Add rules or assets', value: MENU_CHOICES.ADD }, + { label: 'Compile for all editors', value: MENU_CHOICES.COMPILE }, + { label: 'Check project status', value: MENU_CHOICES.DOCTOR }, + { label: 'Remove something', value: MENU_CHOICES.REMOVE }, + { label: 'Exit', value: MENU_CHOICES.EXIT }, + ], + }); if (choice === MENU_CHOICES.EXIT) { + outroPrompt('See you next time.'); process.exit(0); } - try { - switch (choice) { - case MENU_CHOICES.ADD: - await runAdd(undefined, {}); - break; - case MENU_CHOICES.COMPILE: - await runCompile({ verbose: false, dryRun: false }); - break; - case MENU_CHOICES.DOCTOR: - await runDoctor(); - break; - case MENU_CHOICES.REMOVE: - await runRemove(undefined); - break; - } - } catch (err) { - if (err instanceof Error && err.name === 'ExitPromptError') { - // Ctrl+C inside a subcommand — return to main menu - } else { - throw err; - } + switch (choice) { + case MENU_CHOICES.ADD: + await runAdd(undefined, {}); + break; + case MENU_CHOICES.COMPILE: + await 
runCompile({ verbose: false, dryRun: false }); + break; + case MENU_CHOICES.DOCTOR: + await runDoctor(); + break; + case MENU_CHOICES.REMOVE: + await runRemove(undefined); + break; } } } diff --git a/packages/cli/src/commands/remove.ts b/packages/cli/src/commands/remove.ts index 00df3eb..04e2b52 100644 --- a/packages/cli/src/commands/remove.ts +++ b/packages/cli/src/commands/remove.ts @@ -2,11 +2,11 @@ import { join } from 'node:path'; import { readFile, writeFile, unlink } from 'node:fs/promises'; import type { Command } from 'commander'; import { parse, stringify } from 'yaml'; -import { checkbox, confirm, Separator } from '@inquirer/prompts'; import { readConfig } from '../core/parser.js'; import { fileExists } from '../utils/fs.js'; import { isAssetType, removeAsset } from '../core/assets.js'; import { validateInput } from './add.js'; +import { multiselectPrompt, confirmPrompt, introPrompt, outroPrompt, isInteractiveSession } from '../utils/prompt.js'; import * as ui from '../utils/ui.js'; import type { PulledEntry, AssetEntry } from '../bridges/types.js'; @@ -54,6 +54,10 @@ async function removeRule(cwd: string, path: string): Promise { export async function runRemove(ruleArg: string | undefined): Promise { const cwd = process.cwd(); + if (isInteractiveSession()) { + introPrompt('Remove rules or assets'); + } + if (!(await fileExists(join(cwd, '.dwf', 'config.yml')))) { ui.error('.dwf/config.yml not found', 'Run devw init to initialize the project'); process.exitCode = 1; @@ -63,6 +67,12 @@ export async function runRemove(ruleArg: string | undefined): Promise { const config = await readConfig(cwd); if (!ruleArg) { + if (!isInteractiveSession()) { + ui.error('No rule specified', 'Usage: devw remove /'); + process.exitCode = 1; + return; + } + const hasRules = config.pulled.length > 0; const hasAssets = config.assets.length > 0; @@ -73,33 +83,30 @@ export async function runRemove(ruleArg: string | undefined): Promise { type RemoveChoice = { kind: 'rule'; path: 
string } | { kind: 'asset'; type: string; name: string }; - const choices: (RemoveChoice | Separator)[] = []; + const choices: RemoveChoice[] = []; if (hasRules) { - choices.push(new Separator('── Rules ──')); for (const p of config.pulled) { - choices.push({ kind: 'rule', path: p.path } as RemoveChoice); + choices.push({ kind: 'rule', path: p.path }); } } if (hasAssets) { - choices.push(new Separator('── Assets ──')); for (const a of config.assets) { - choices.push({ kind: 'asset', type: a.type, name: a.name } as RemoveChoice); + choices.push({ kind: 'asset', type: a.type, name: a.name }); } } let selected: RemoveChoice[]; try { - selected = await checkbox({ + selected = await multiselectPrompt({ message: 'Select items to remove', - choices: choices.map((c) => { - if (c instanceof Separator) return c; + options: choices.map((c) => { if (c.kind === 'rule') { const entry = config.pulled.find((p) => p.path === c.path); - return { name: `${c.path} (v${entry?.version ?? '?'})`, value: c }; + return { label: `[rule] ${c.path} (v${entry?.version ?? 
'?'})`, value: c }; } - return { name: `${c.type}/${c.name}`, value: c }; + return { label: `[asset] ${c.type}/${c.name}`, value: c }; }), }); } catch { @@ -112,9 +119,9 @@ export async function runRemove(ruleArg: string | undefined): Promise { } try { - const shouldProceed = await confirm({ + const shouldProceed = await confirmPrompt({ message: `Remove ${String(selected.length)} item(s)?`, - default: true, + defaultValue: true, }); if (!shouldProceed) { ui.info('Remove cancelled'); @@ -137,6 +144,7 @@ export async function runRemove(ruleArg: string | undefined): Promise { const { runCompileFromAdd } = await import('./compile.js'); await runCompileFromAdd(); + outroPrompt('Remove command completed'); return; } @@ -185,6 +193,7 @@ export async function runRemove(ruleArg: string | undefined): Promise { const { runCompileFromAdd } = await import('./compile.js'); await runCompileFromAdd(); + outroPrompt('Remove command completed'); return; } @@ -206,6 +215,7 @@ export async function runRemove(ruleArg: string | undefined): Promise { const { runCompileFromAdd } = await import('./compile.js'); await runCompileFromAdd(); + outroPrompt('Remove command completed'); } export function registerRemoveCommand(program: Command): void { diff --git a/packages/cli/src/commands/watch.ts b/packages/cli/src/commands/watch.ts index e7a1ed0..9b24866 100644 --- a/packages/cli/src/commands/watch.ts +++ b/packages/cli/src/commands/watch.ts @@ -1,6 +1,6 @@ import { join } from 'node:path'; import type { Command } from 'commander'; -import chalk from 'chalk'; +import pc from 'picocolors'; import chokidar from 'chokidar'; import { executePipeline } from './compile.js'; import type { CompileResult } from './compile.js'; @@ -102,7 +102,7 @@ async function runWatch(options: WatchOptions): Promise { }); ui.newline(); - ui.header(chalk.green('Watching .dwf/ for changes...')); + ui.header(pc.green('Watching .dwf/ for changes...')); ui.info('Running initial compile...'); ui.newline(); diff --git 
a/packages/cli/src/core/canonical.ts b/packages/cli/src/core/canonical.ts new file mode 100644 index 0000000..75de964 --- /dev/null +++ b/packages/cli/src/core/canonical.ts @@ -0,0 +1,63 @@ +import { mkdir, writeFile } from 'node:fs/promises'; +import { basename, join } from 'node:path'; +import type { Rule } from '../bridges/types.js'; +import { filterRules, formatScopeHeading, groupByScope } from './helpers.js'; +import { cleanStaleFiles, scopeToFilename } from './scope-filename.js'; + +const GENERATED_COMMENT = ''; +const CANONICAL_DIR_PARTS = ['.agents', 'rules', 'devw'] as const; +const CANONICAL_PREFIX = 'dwf-'; +const CANONICAL_EXTENSION = '.md'; + +export function buildCanonicalMarkdown(scope: string, rules: Rule[]): string { + const lines: string[] = [GENERATED_COMMENT, `# ${formatScopeHeading(scope)}`, '']; + + for (const rule of rules) { + const contentLines = rule.content.split('\n'); + const first = contentLines[0]; + if (first !== undefined) { + lines.push(`- ${first}`); + } + + for (let i = 1; i < contentLines.length; i++) { + const line = contentLines[i]; + if (line !== undefined) { + lines.push(line.length > 0 ? ` ${line}` : ''); + } + } + } + + lines.push(''); + return lines.join('\n'); +} + +export function buildCanonicalOutputs(rules: Rule[]): Map { + const output = new Map(); + const filtered = filterRules(rules); + const grouped = groupByScope(filtered); + + for (const [scope, scopeRules] of grouped) { + const filename = scopeToFilename(scope, CANONICAL_PREFIX, CANONICAL_EXTENSION); + const relativePath = join(...CANONICAL_DIR_PARTS, filename); + output.set(relativePath, buildCanonicalMarkdown(scope, scopeRules)); + } + + return output; +} + +export async function writeCanonical(cwd: string, rulesOrOutputs: Rule[] | Map): Promise { + const canonicalDir = join(cwd, ...CANONICAL_DIR_PARTS); + await mkdir(canonicalDir, { recursive: true }); + + const outputs = rulesOrOutputs instanceof Map ? 
rulesOrOutputs : buildCanonicalOutputs(rulesOrOutputs); + const writtenFilenames = new Set(); + + for (const [relativePath, content] of outputs) { + const filename = basename(relativePath); + writtenFilenames.add(filename); + await writeFile(join(cwd, relativePath), content, 'utf-8'); + } + + await cleanStaleFiles(canonicalDir, CANONICAL_PREFIX, CANONICAL_EXTENSION, writtenFilenames); + return [...outputs.keys()]; +} diff --git a/packages/cli/src/core/cleanup.ts b/packages/cli/src/core/cleanup.ts new file mode 100644 index 0000000..3a7f56e --- /dev/null +++ b/packages/cli/src/core/cleanup.ts @@ -0,0 +1,112 @@ +import { readFile, unlink, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { fileExists } from '../utils/fs.js'; +import { removeMarkedBlock } from './markers.js'; + +export interface LegacyFile { + path: string; + type: 'marker' | 'full-file'; + bridgeId: string; +} + +/** + * Detect legacy single-file output from v0.5/v0.6 that needs migration. + * + * Only .cursor and .windsurf had full-file replacement, so only those are legacy. + * CLAUDE.md markers need to be removed since Claude is now a DirectoryBridge. + * GEMINI.md and .github/copilot-instructions.md are NOT legacy (they remain MarkerBridge). 
+ */ +export async function detectLegacyFiles(cwd: string): Promise { + const legacyFiles: LegacyFile[] = []; + + // Check for legacy .cursor/rules/devworkflows.mdc (full-file) + const cursorLegacy = join(cwd, '.cursor', 'rules', 'devworkflows.mdc'); + if (await fileExists(cursorLegacy)) { + legacyFiles.push({ + path: cursorLegacy, + type: 'full-file', + bridgeId: 'cursor', + }); + } + + // Check for legacy .windsurf/rules/devworkflows.md (full-file) + const windsurfLegacy = join(cwd, '.windsurf', 'rules', 'devworkflows.md'); + if (await fileExists(windsurfLegacy)) { + legacyFiles.push({ + path: windsurfLegacy, + type: 'full-file', + bridgeId: 'windsurf', + }); + } + + // Check for CLAUDE.md with dev-workflows markers (marker type) + const claudeLegacy = join(cwd, 'CLAUDE.md'); + if (await fileExists(claudeLegacy)) { + const content = await readFile(claudeLegacy, 'utf-8'); + if (content.includes('') && content.includes('')) { + legacyFiles.push({ + path: claudeLegacy, + type: 'marker', + bridgeId: 'claude', + }); + } + } + + return legacyFiles; +} + +/** + * Remove legacy files. For full-file types, delete the file. + * For marker types, remove the marker block preserving user content. + * Returns list of actions taken (for user messaging). + * + * This is idempotent — if files don't exist, skip silently. + */ +export async function migrateLegacyFiles(_cwd: string, legacyFiles: LegacyFile[]): Promise { + const actions: string[] = []; + + for (const legacy of legacyFiles) { + if (!(await fileExists(legacy.path))) { + continue; + } + + if (legacy.type === 'full-file') { + await unlink(legacy.path); + actions.push(`Removed legacy ${legacy.path}`); + } else if (legacy.type === 'marker') { + const removed = await removeLegacyMarkerBlock(legacy.path); + if (removed) { + actions.push(`Removed devw block from ${legacy.path}`); + } + } + } + + return actions; +} + +/** + * Remove the old marker block (BEGIN/END dev-workflows) from a file. 
+ * If the file becomes empty after removal, delete it. + * Returns true if changes were made. + */ +export async function removeLegacyMarkerBlock(filePath: string): Promise { + if (!(await fileExists(filePath))) { + return false; + } + + const content = await readFile(filePath, 'utf-8'); + + if (!content.includes('') || !content.includes('')) { + return false; + } + + const cleaned = removeMarkedBlock(content); + + if (cleaned.trim() === '') { + await unlink(filePath); + } else { + await writeFile(filePath, cleaned, 'utf-8'); + } + + return true; +} diff --git a/packages/cli/src/core/merge.ts b/packages/cli/src/core/merge.ts new file mode 100644 index 0000000..70ebd20 --- /dev/null +++ b/packages/cli/src/core/merge.ts @@ -0,0 +1,8 @@ +import type { Rule } from '../bridges/types.js'; + +export function mergeRules(globalRules: Rule[], projectRules: Rule[]): Rule[] { + const projectRuleIds = new Set(projectRules.map((rule) => rule.id)); + const mergedGlobalRules = globalRules.filter((rule) => !projectRuleIds.has(rule.id)); + + return [...mergedGlobalRules, ...projectRules]; +} diff --git a/packages/cli/src/core/parser.ts b/packages/cli/src/core/parser.ts index b18cf71..7eefd32 100644 --- a/packages/cli/src/core/parser.ts +++ b/packages/cli/src/core/parser.ts @@ -1,9 +1,9 @@ import { readFile, readdir } from 'node:fs/promises'; import { join } from 'node:path'; import { parse } from 'yaml'; -import type { Rule, ProjectConfig, PulledEntry, AssetEntry, AssetType } from '../bridges/types.js'; +import type { Rule, ProjectConfig, PulledEntry, AssetEntry, AssetType, ScopeMetadata } from '../bridges/types.js'; import { ASSET_TYPE } from '../bridges/types.js'; -import { isValidScope } from './schema.js'; +import { isValidScope, validateScopeMetadata, VALID_CONFIG_VERSIONS } from './schema.js'; interface RawRule { id?: string; @@ -17,11 +17,19 @@ interface RawRule { interface RawRuleFile { scope?: string; + metadata?: Record; + globs?: unknown; + paths?: unknown; + trigger?: 
unknown; rules?: RawRule[]; } export async function readConfig(cwd: string): Promise { - const configPath = join(cwd, '.dwf', 'config.yml'); + return readConfigFromDwfDir(join(cwd, '.dwf')); +} + +export async function readConfigFromDwfDir(dwfDir: string): Promise { + const configPath = join(dwfDir, 'config.yml'); const raw = await readFile(configPath, 'utf-8'); const parsed: unknown = parse(raw); @@ -33,6 +41,11 @@ export async function readConfig(cwd: string): Promise { const version = typeof doc['version'] === 'string' ? doc['version'] : '0.1'; + const validVersions = VALID_CONFIG_VERSIONS as readonly string[]; + if (!validVersions.includes(version)) { + throw new Error(`Invalid config.yml: unsupported version "${version}". Supported versions: ${VALID_CONFIG_VERSIONS.join(', ')}`); + } + const projectRaw = doc['project']; if (!projectRaw || typeof projectRaw !== 'object') { throw new Error('Invalid config.yml: missing "project" section'); @@ -83,6 +96,9 @@ export async function readConfig(cwd: string): Promise { .filter((a) => a.name !== '' && assetTypeValues.has(a.type)) : []; + const globalRaw = doc['global']; + const global = typeof globalRaw === 'boolean' ? globalRaw : true; + return { version, project: { name: projectName, description: projectDescription }, @@ -91,14 +107,19 @@ export async function readConfig(cwd: string): Promise { blocks, pulled, assets, + global, }; } -function normalizeRule(raw: RawRule, scope: string): Rule | null { - if (!raw.id || !raw.content) return null; +function normalizeRule(raw: RawRule, scope: string, scopeMetadata?: ScopeMetadata): Rule | null { + if (!raw.id || !raw.content) { + return null; + } const severity = raw.severity ?? 
'error'; - if (severity !== 'error' && severity !== 'warning' && severity !== 'info') return null; + if (severity !== 'error' && severity !== 'warning' && severity !== 'info') { + return null; + } const enabled = raw.enabled !== false; @@ -111,12 +132,58 @@ function normalizeRule(raw: RawRule, scope: string): Rule | null { enabled, sourceBlock: raw.sourceBlock, source: raw.source, + metadata: scopeMetadata, }; } -export async function readRules(cwd: string): Promise { - const rulesDir = join(cwd, '.dwf', 'rules'); - const entries = await readdir(rulesDir); +function extractScopeMetadata(doc: RawRuleFile, file: string): ScopeMetadata | undefined { + // Support both nested metadata block and top-level fields + const metadataRaw: Record = {}; + + if (doc.metadata && typeof doc.metadata === 'object') { + Object.assign(metadataRaw, doc.metadata); + } + + // Top-level fields take precedence over nested metadata block + if (doc.globs !== undefined) { + metadataRaw['globs'] = doc.globs; + } + if (doc.paths !== undefined) { + metadataRaw['paths'] = doc.paths; + } + if (doc.trigger !== undefined) { + metadataRaw['trigger'] = doc.trigger; + } + + if (Object.keys(metadataRaw).length === 0) { + return undefined; + } + + const { metadata, errors } = validateScopeMetadata(metadataRaw); + + for (const error of errors) { + console.warn(`Warning: ${error.field} in ${file}: ${error.message}`); + } + + if (errors.length > 0) { + return undefined; + } + + return metadata; +} + +export async function readRules(cwd: string, rulesPath?: string): Promise { + const rulesDir = resolveRulesDir(cwd, rulesPath); + if (!rulesDir) { + return []; + } + + let entries: string[]; + try { + entries = await readdir(rulesDir); + } catch { + return []; + } const ymlFiles = entries.filter((f) => f.endsWith('.yml') || f.endsWith('.yaml')); const allRules: Rule[] = []; @@ -125,24 +192,50 @@ export async function readRules(cwd: string): Promise { const raw = await readFile(join(rulesDir, file), 'utf-8'); 
const parsed: unknown = parse(raw); - if (!parsed || typeof parsed !== 'object') continue; + if (!parsed || typeof parsed !== 'object') { + continue; + } const doc = parsed as RawRuleFile; const scope = doc.scope ?? file.replace(/\.ya?ml$/, ''); - if (!Array.isArray(doc.rules)) continue; + if (!Array.isArray(doc.rules)) { + continue; + } if (!isValidScope(scope)) { console.warn(`Warning: invalid scope "${scope}" in ${file}, skipping rules`); continue; } + const scopeMetadata = extractScopeMetadata(doc, file); + for (const rawRule of doc.rules) { - if (!rawRule || typeof rawRule !== 'object') continue; - const rule = normalizeRule(rawRule, scope); - if (rule) allRules.push(rule); + if (!rawRule || typeof rawRule !== 'object') { + continue; + } + const rule = normalizeRule(rawRule, scope, scopeMetadata); + if (rule) { + allRules.push(rule); + } } } return allRules; } + +function resolveRulesDir(cwd: string, rulesPath?: string): string { + if (rulesPath) { + return rulesPath; + } + + const lastSegment = cwd.split(/[\\/]/).at(-1); + if (lastSegment === '.dwf') { + return join(cwd, 'rules'); + } + if (lastSegment === 'rules') { + return cwd; + } + + return join(cwd, '.dwf', 'rules'); +} diff --git a/packages/cli/src/core/schema.ts b/packages/cli/src/core/schema.ts index dce920e..0b7dc6f 100644 --- a/packages/cli/src/core/schema.ts +++ b/packages/cli/src/core/schema.ts @@ -1,3 +1,5 @@ +import type { ScopeMetadata } from '../bridges/types.js'; + export const SCOPE_REGEX = /^[a-z][a-z0-9]*(?::[a-z][a-z0-9-]*)?$/; export const BUILTIN_SCOPES = ['architecture', 'conventions', 'security', 'workflow', 'testing'] as const; @@ -6,6 +8,11 @@ export type BuiltinScope = (typeof BUILTIN_SCOPES)[number]; export const VALID_TOOL_IDS = ['claude', 'cursor', 'gemini', 'windsurf', 'copilot'] as const; export type ValidToolId = (typeof VALID_TOOL_IDS)[number]; +export const VALID_TRIGGERS = ['always', 'glob', 'manual'] as const; +export type ValidTrigger = (typeof VALID_TRIGGERS)[number]; 
+ +export const VALID_CONFIG_VERSIONS = ['0.1', '0.2'] as const; + export function isValidScope(scope: string): boolean { return SCOPE_REGEX.test(scope); } @@ -13,3 +20,48 @@ export function isValidScope(scope: string): boolean { export function isBuiltinScope(scope: string): scope is BuiltinScope { return (BUILTIN_SCOPES as readonly string[]).includes(scope); } + +export function isValidTrigger(value: string): value is ValidTrigger { + return (VALID_TRIGGERS as readonly string[]).includes(value); +} + +export interface ScopeMetadataValidationError { + field: string; + message: string; +} + +export function validateScopeMetadata(raw: Record): { metadata: ScopeMetadata | undefined; errors: ScopeMetadataValidationError[] } { + const errors: ScopeMetadataValidationError[] = []; + const metadata: ScopeMetadata = {}; + let hasMetadata = false; + + if ('globs' in raw && raw['globs'] !== undefined) { + if (!Array.isArray(raw['globs']) || !raw['globs'].every((g): g is string => typeof g === 'string')) { + errors.push({ field: 'globs', message: 'globs must be an array of strings' }); + } else { + metadata.globs = raw['globs']; + hasMetadata = true; + } + } + + if ('paths' in raw && raw['paths'] !== undefined) { + if (!Array.isArray(raw['paths']) || !raw['paths'].every((p): p is string => typeof p === 'string')) { + errors.push({ field: 'paths', message: 'paths must be an array of strings' }); + } else { + metadata.paths = raw['paths']; + hasMetadata = true; + } + } + + if ('trigger' in raw && raw['trigger'] !== undefined) { + const triggerVal = String(raw['trigger']); + if (!isValidTrigger(triggerVal)) { + errors.push({ field: 'trigger', message: `trigger must be one of: ${VALID_TRIGGERS.join(', ')}. Got "${triggerVal}"` }); + } else { + metadata.trigger = triggerVal; + hasMetadata = true; + } + } + + return { metadata: hasMetadata ? 
metadata : undefined, errors }; +} diff --git a/packages/cli/src/core/scope-filename.ts b/packages/cli/src/core/scope-filename.ts new file mode 100644 index 0000000..68ae1aa --- /dev/null +++ b/packages/cli/src/core/scope-filename.ts @@ -0,0 +1,53 @@ +import { readdir, unlink } from 'node:fs/promises'; +import { join } from 'node:path'; + +const COLON_SEPARATOR = '-'; + +/** + * Convert a scope name to a filename using the given prefix and extension. + * + * Colons in the scope are replaced with hyphens. + * Example: scopeToFilename('team:payments', 'dwf-', '.md') => 'dwf-team-payments.md' + */ +export function scopeToFilename(scope: string, prefix: string, extension: string): string { + const sanitized = scope.replaceAll(':', COLON_SEPARATOR); + return `${prefix}${sanitized}${extension}`; +} + +/** + * Glob for files matching {prefix}*{extension} in a directory, + * delete any that are NOT in the currentFiles set. + * Returns the list of deleted file paths (relative to dir). + * + * Only touches files that start with the given prefix. + * Ignores files that don't match the prefix pattern. + * If the directory does not exist, returns an empty array without error. 
+ */ +export async function cleanStaleFiles( + dir: string, + prefix: string, + extension: string, + currentFiles: Set, +): Promise { + let entries: string[]; + try { + entries = await readdir(dir); + } catch { + return []; + } + + const deleted: string[] = []; + + for (const entry of entries) { + if (!entry.startsWith(prefix) || !entry.endsWith(extension)) { + continue; + } + + if (!currentFiles.has(entry)) { + await unlink(join(dir, entry)); + deleted.push(entry); + } + } + + return deleted; +} diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index dda7ddd..b028e2b 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -9,6 +9,7 @@ import { registerListCommand } from './commands/list.js'; import { registerExplainCommand } from './commands/explain.js'; import { registerWatchCommand } from './commands/watch.js'; import { runMainMenu } from './commands/menu.js'; +import { renderBanner } from './utils/banner.js'; const require = createRequire(import.meta.url); const pkg = require('../package.json') as { version: string }; @@ -20,6 +21,15 @@ program .description('Compile developer rules into editor-specific config files') .version(pkg.version); +program.addHelpText('beforeAll', () => { + const banner = renderBanner(); + if (banner.length === 0) { + return ''; + } + + return `${banner}\n`; +}); + registerInitCommand(program); registerCompileCommand(program); registerDoctorCommand(program); diff --git a/packages/cli/src/utils/banner.ts b/packages/cli/src/utils/banner.ts new file mode 100644 index 0000000..4629cce --- /dev/null +++ b/packages/cli/src/utils/banner.ts @@ -0,0 +1,35 @@ +const BANNER_LINES = [ + "██████╗ ███████╗██╗ ██╗██╗ ██╗", + "██╔══██╗██╔════╝██║ ██║██║ ██║", + "██║ ██║█████╗ ██║ ██║██║ █╗ ██║", + "██║ ██║██╔══╝ ╚██╗ ██╔╝██║███╗██║", + "██████╔╝███████╗ ╚████╔╝ ╚███╔███╔╝", + "╚═════╝ ╚══════╝ ╚═══╝ ╚══╝╚══╝", +] as const; + +const GRADIENT_START = 252; +const GRADIENT_END = 240; + +function colorizeLine(line: 
string, color: number): string { + return `\u001b[38;5;${String(color)}m${line}\u001b[0m`; +} + +function gradientColor(index: number, total: number): number { + if (total <= 1) { + return GRADIENT_START; + } + + const ratio = index / (total - 1); + return Math.round(GRADIENT_START + (GRADIENT_END - GRADIENT_START) * ratio); +} + +export function renderBanner(): string { + if (!process.stdout.isTTY) { + return ''; + } + + return BANNER_LINES.map((line, index) => { + const color = gradientColor(index, BANNER_LINES.length); + return colorizeLine(line, color); + }).join('\n'); +} diff --git a/packages/cli/src/utils/cache.ts b/packages/cli/src/utils/cache.ts index 411d733..aaaf373 100644 --- a/packages/cli/src/utils/cache.ts +++ b/packages/cli/src/utils/cache.ts @@ -1,5 +1,7 @@ -import { readFile, writeFile, mkdir } from 'node:fs/promises'; -import { join, dirname } from 'node:path'; +import { mkdir, readFile, writeFile } from 'node:fs/promises'; +import { dirname, join } from 'node:path'; +import * as ui from './ui.js'; +import { fileExists } from './fs.js'; const TTL_MS = 3_600_000; // 1 hour @@ -11,7 +13,32 @@ interface CacheEntry { type CacheStore = Record>; function getCachePath(cwd: string): string { - return join(cwd, '.dwf', '.cache', 'registry.json'); + return join(cwd, '.dwf', '.cache', 'registry-store.json'); +} + +function getETagDataPath(cacheDir: string, cacheKey: string): string { + return join(cacheDir, `${cacheKey}.json`); +} + +function getETagPath(cacheDir: string, cacheKey: string): string { + return join(cacheDir, `${cacheKey}.etag`); +} + +async function readJsonFile(filePath: string): Promise { + try { + const raw = await readFile(filePath, 'utf-8'); + return JSON.parse(raw) as T; + } catch { + return null; + } +} + +async function readTextFile(filePath: string): Promise { + try { + return await readFile(filePath, 'utf-8'); + } catch { + return null; + } } async function readStore(cwd: string): Promise { @@ -53,3 +80,64 @@ export async function 
set(cwd: string, key: string, value: T): Promise store[key] = { data: value, timestamp: Date.now() }; await writeStore(cwd, store); } + +export interface FetchWithETagResult { + data: T; + fromCache: boolean; +} + +export async function fetchWithETag( + url: string, + cacheDir: string, + cacheKey: string, +): Promise> { + const dataPath = getETagDataPath(cacheDir, cacheKey); + const etagPath = getETagPath(cacheDir, cacheKey); + + const headers: Record = {}; + const cachedETagRaw = await readTextFile(etagPath); + const cachedETag = typeof cachedETagRaw === 'string' ? cachedETagRaw.trim() : ''; + if (cachedETag.length > 0) { + headers['If-None-Match'] = cachedETag; + } + + try { + const response = await fetch(url, { headers }); + + if (response.status === 304) { + const cachedData = await readJsonFile(dataPath); + if (cachedData === null) { + throw new Error(`Server returned 304 for ${cacheKey} but cache file is missing`); + } + + return { data: cachedData, fromCache: true }; + } + + if (!response.ok) { + throw new Error(`Request failed for ${cacheKey} (HTTP ${String(response.status)})`); + } + + const data = (await response.json()) as T; + await mkdir(dirname(dataPath), { recursive: true }); + await writeFile(dataPath, `${JSON.stringify(data, null, 2)}\n`, 'utf-8'); + + const etag = response.headers.get('etag'); + if (etag && etag.trim().length > 0) { + await writeFile(etagPath, `${etag.trim()}\n`, 'utf-8'); + } + + return { data, fromCache: false }; + } catch (error) { + const hasDataCache = await fileExists(dataPath); + if (hasDataCache) { + const cachedData = await readJsonFile(dataPath); + if (cachedData !== null) { + ui.warn('Using cached registry data because the network request failed.'); + return { data: cachedData, fromCache: true }; + } + } + + const reason = error instanceof Error ? 
error.message : String(error); + throw new Error(`Unable to fetch ${cacheKey}: ${reason}`); + } +} diff --git a/packages/cli/src/utils/github.ts b/packages/cli/src/utils/github.ts index 996f342..a955f69 100644 --- a/packages/cli/src/utils/github.ts +++ b/packages/cli/src/utils/github.ts @@ -1,7 +1,12 @@ +import { join } from 'node:path'; +import { fetchWithETag } from './cache.js'; +import type { Registry, RegistryAssets, RegistryRule } from './registry.js'; + const BRANCH = 'main'; const REPO = 'gpolanco/dev-workflows'; const RAW_BASE = `https://raw.githubusercontent.com/${REPO}/${BRANCH}/content`; const API_BASE = `https://api.github.com/repos/${REPO}/contents/content`; +const REGISTRY_URL = `${RAW_BASE}/registry.json`; export class GitHubError extends Error { constructor( @@ -100,3 +105,108 @@ export async function listDirectory(path?: string): Promise { type: entry.type, })); } + +function isStringArray(value: unknown): value is string[] { + return Array.isArray(value) && value.every((item) => typeof item === 'string'); +} + +function parseRegistryRule(value: unknown): RegistryRule | null { + if (!value || typeof value !== 'object') { + return null; + } + + const record = value as Record; + if ( + typeof record['path'] !== 'string' || + typeof record['name'] !== 'string' || + typeof record['description'] !== 'string' || + typeof record['version'] !== 'string' || + typeof record['scope'] !== 'string' || + !isStringArray(record['tags']) || + typeof record['size_bytes'] !== 'number' + ) { + return null; + } + + return { + path: record['path'], + name: record['name'], + description: record['description'], + version: record['version'], + scope: record['scope'], + tags: record['tags'], + size_bytes: record['size_bytes'], + }; +} + +function parseRegistryAssets(value: unknown): RegistryAssets | null { + if (!value || typeof value !== 'object') { + return null; + } + + const record = value as Record; + if ( + !isStringArray(record['commands']) || + 
!isStringArray(record['templates']) || + !isStringArray(record['hooks']) || + !isStringArray(record['presets']) + ) { + return null; + } + + return { + commands: record['commands'], + templates: record['templates'], + hooks: record['hooks'], + presets: record['presets'], + }; +} + +function parseRegistry(value: unknown): Registry { + if (!value || typeof value !== 'object') { + throw new GitHubError('Invalid registry.json: expected an object', 0); + } + + const record = value as Record; + const rulesRaw = record['rules']; + if (!Array.isArray(rulesRaw)) { + throw new GitHubError('Invalid registry.json: missing rules array', 0); + } + + const rules: RegistryRule[] = []; + for (const rule of rulesRaw) { + const parsed = parseRegistryRule(rule); + if (parsed === null) { + throw new GitHubError('Invalid registry.json: rule entry has invalid shape', 0); + } + rules.push(parsed); + } + + const assets = parseRegistryAssets(record['assets']); + if (assets === null) { + throw new GitHubError('Invalid registry.json: invalid assets object', 0); + } + + if (typeof record['version'] !== 'number' || typeof record['generated_at'] !== 'string') { + throw new GitHubError('Invalid registry.json: missing version or generated_at', 0); + } + + return { + version: record['version'], + generated_at: record['generated_at'], + rules, + assets, + }; +} + +export async function fetchRegistry(cwd: string): Promise { + const cacheDir = join(cwd, '.dwf', '.cache'); + + try { + const result = await fetchWithETag(REGISTRY_URL, cacheDir, 'registry'); + return parseRegistry(result.data); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + throw new GitHubError(`Could not fetch registry manifest: ${message}`, 0); + } +} diff --git a/packages/cli/src/utils/prompt.ts b/packages/cli/src/utils/prompt.ts new file mode 100644 index 0000000..2e34b45 --- /dev/null +++ b/packages/cli/src/utils/prompt.ts @@ -0,0 +1,178 @@ +import * as p from '@clack/prompts'; + +export interface PromptOption { + value: T; + label: string; + hint?: string; +} + +interface SelectPromptOptions { + message: string; + options: ReadonlyArray>; + initialValue?: T; +} + +interface MultiselectPromptOptions { + message: string; + options: ReadonlyArray>; + required?: boolean; + initialValues?: ReadonlyArray; +} + +interface ConfirmPromptOptions { + message: string; + defaultValue?: boolean; +} + +interface TextPromptOptions { + message: string; + placeholder?: string; + defaultValue?: string; +} + +interface SpinnerTask { + label: string; + task: () => Promise; + successMessage?: string; + errorMessage?: string; +} + +function toClackOption(option: PromptOption): p.Option { + const mapped = { + value: option.value, + label: option.label, + } as { value: T; label: string; hint?: string }; + + if (option.hint !== undefined) { + mapped.hint = option.hint; + } + + return mapped as p.Option; +} + +function ensureInteractive(): void { + if (!isInteractiveSession()) { + throw new Error( + 'Interactive prompts are unavailable in non-interactive mode (TTY/CI). 
Use CLI flags to run non-interactively.', + ); + } +} + +function isCiEnvironment(): boolean { + const rawCi = process.env['CI']; + if (typeof rawCi !== 'string') { + return false; + } + + const normalized = rawCi.trim().toLowerCase(); + if (normalized.length === 0) { + return false; + } + + return normalized !== '0' && normalized !== 'false'; +} + +export function isInteractiveSession(): boolean { + return Boolean(process.stdout.isTTY && process.stdin.isTTY) && !isCiEnvironment(); +} + +function handleCancel(value: T | symbol): T { + if (p.isCancel(value)) { + p.cancel('Cancelled'); + process.exit(0); + } + + return value as T; +} + +export async function selectPrompt(options: SelectPromptOptions): Promise { + ensureInteractive(); + + const value = await p.select({ + message: options.message, + initialValue: options.initialValue, + options: options.options.map((option) => toClackOption(option)), + }); + + return handleCancel(value); +} + +export async function multiselectPrompt( + options: MultiselectPromptOptions, +): Promise { + ensureInteractive(); + + const value = await p.multiselect({ + message: options.message, + required: options.required, + initialValues: options.initialValues ? 
[...options.initialValues] : undefined, + options: options.options.map((option) => toClackOption(option)), + }); + + return handleCancel(value); +} + +export async function confirmPrompt(options: ConfirmPromptOptions): Promise { + ensureInteractive(); + + const value = await p.confirm({ + message: options.message, + initialValue: options.defaultValue, + }); + + return handleCancel(value); +} + +export async function textPrompt(options: TextPromptOptions): Promise { + ensureInteractive(); + + const value = await p.text({ + message: options.message, + placeholder: options.placeholder, + defaultValue: options.defaultValue, + }); + + return handleCancel(value); +} + +export function introPrompt(message: string): void { + if (!isInteractiveSession()) { + return; + } + + p.intro(message); +} + +export function outroPrompt(message: string): void { + if (!isInteractiveSession()) { + return; + } + + p.outro(message); +} + +export function notePrompt(message: string, title?: string): void { + if (!isInteractiveSession()) { + return; + } + + p.note(message, title); +} + +export async function spinnerTask(options: SpinnerTask): Promise { + if (!isInteractiveSession()) { + return options.task(); + } + + const spinner = p.spinner(); + spinner.start(options.label); + + try { + const result = await options.task(); + spinner.stop(options.successMessage ?? options.label); + return result; + } catch (error) { + spinner.stop(options.errorMessage ?? 
options.label); + throw error; + } +} diff --git a/packages/cli/src/utils/registry.ts b/packages/cli/src/utils/registry.ts new file mode 100644 index 0000000..b98fcff --- /dev/null +++ b/packages/cli/src/utils/registry.ts @@ -0,0 +1,59 @@ +export interface RegistryRule { + path: string; + name: string; + description: string; + version: string; + scope: string; + tags: string[]; + size_bytes: number; +} + +export interface RegistryAssets { + commands: string[]; + templates: string[]; + hooks: string[]; + presets: string[]; +} + +export interface Registry { + version: number; + generated_at: string; + rules: RegistryRule[]; + assets: RegistryAssets; +} + +export function filterRegistryByTag(registry: Registry, tag: string): RegistryRule[] { + const normalizedTag = tag.trim().toLowerCase(); + if (normalizedTag.length === 0) { + return [...registry.rules]; + } + + return registry.rules.filter((rule) => + rule.tags.some((ruleTag) => ruleTag.toLowerCase() === normalizedTag), + ); +} + +export function searchRegistry(registry: Registry, query: string): RegistryRule[] { + const terms = query + .trim() + .toLowerCase() + .split(/\s+/) + .filter((term) => term.length > 0); + + if (terms.length === 0) { + return [...registry.rules]; + } + + return registry.rules.filter((rule) => { + const searchableFields = [ + rule.name, + rule.description, + rule.path, + ...rule.tags, + ].map((field) => field.toLowerCase()); + + return terms.every((term) => + searchableFields.some((fieldValue) => fieldValue.includes(term)), + ); + }); +} diff --git a/packages/cli/src/utils/table.ts b/packages/cli/src/utils/table.ts new file mode 100644 index 0000000..33cf9cd --- /dev/null +++ b/packages/cli/src/utils/table.ts @@ -0,0 +1,59 @@ +import pc from 'picocolors'; + +const INDENT = ' '; + +function padCell(value: string, width: number): string { + return value.padEnd(width, ' '); +} + +function line(left: string, middle: string, right: string, widths: number[]): string { + const segments = 
widths.map((width) => '─'.repeat(width + 2)); + return `${INDENT}${left}${segments.join(middle)}${right}`; +} + +function inferWidths(headers: string[], rows: string[][], columnWidths?: number[]): number[] { + return headers.map((header, index) => { + const headerWidth = header.length; + const rowWidth = rows.reduce((maxWidth, row) => { + const value = row[index] ?? ''; + return Math.max(maxWidth, value.length); + }, 0); + const minWidth = columnWidths?.[index] ?? 0; + return Math.max(headerWidth, rowWidth, minWidth); + }); +} + +function renderRow(cells: string[], widths: number[], bold = false): string { + const rendered = widths.map((width, index) => { + const rawCell = cells[index] ?? ''; + const paddedCell = ` ${padCell(rawCell, width)} `; + return bold ? pc.bold(paddedCell) : paddedCell; + }); + + return `${INDENT}│${rendered.join('│')}│`; +} + +export function renderTable( + headers: string[], + rows: string[][], + columnWidths?: number[], +): string { + if (headers.length === 0) { + return ''; + } + + const widths = inferWidths(headers, rows, columnWidths); + const output: string[] = []; + + output.push(line('┌', '┬', '┐', widths)); + output.push(renderRow(headers, widths, true)); + output.push(line('├', '┼', '┤', widths)); + + for (const row of rows) { + output.push(renderRow(row, widths)); + } + + output.push(line('└', '┴', '┘', widths)); + + return output.join('\n'); +} diff --git a/packages/cli/src/utils/ui.ts b/packages/cli/src/utils/ui.ts index 815c4ff..3c4a9da 100644 --- a/packages/cli/src/utils/ui.ts +++ b/packages/cli/src/utils/ui.ts @@ -1,4 +1,4 @@ -import chalk from 'chalk'; +import pc from 'picocolors'; export const ICONS = { success: '\u2714', @@ -19,22 +19,22 @@ const INDENT = { } as const; export function success(msg: string): void { - console.log(`${INDENT.section}${chalk.green(ICONS.success)} ${msg}`); + console.log(`${INDENT.section}${pc.green(ICONS.success)} ${msg}`); } export function error(msg: string, hint?: string): void { - 
console.error(`${INDENT.section}${chalk.red(ICONS.error)} ${chalk.red(msg)}`); + console.error(`${INDENT.section}${pc.red(ICONS.error)} ${pc.red(msg)}`); if (hint) { - console.error(`${INDENT.detail}${chalk.dim(hint)}`); + console.error(`${INDENT.detail}${pc.dim(hint)}`); } } export function warn(msg: string): void { - console.log(`${INDENT.section}${chalk.yellow(ICONS.warn)} ${chalk.yellow(msg)}`); + console.log(`${INDENT.section}${pc.yellow(ICONS.warn)} ${pc.yellow(msg)}`); } export function info(msg: string): void { - console.log(`${INDENT.section}${chalk.dim(msg)}`); + console.log(`${INDENT.section}${pc.dim(msg)}`); } export function log(msg: string): void { @@ -42,16 +42,16 @@ export function log(msg: string): void { } export function header(title: string): void { - console.log(`${INDENT.section}${chalk.bold(title)}`); + console.log(`${INDENT.section}${pc.bold(title)}`); } export function keyValue(label: string, value: string): void { const padded = label.padEnd(10); - console.log(`${INDENT.detail}${chalk.dim(padded)}${value}`); + console.log(`${INDENT.detail}${pc.dim(padded)}${value}`); } export function divider(): void { - console.log(`${INDENT.section}${chalk.dim(`${ICONS.separator}${ICONS.separator}`)}`); + console.log(`${INDENT.section}${pc.dim(`${ICONS.separator}${ICONS.separator}`)}`); } export function newline(): void { @@ -65,36 +65,36 @@ export function summary(counts: { passed?: number; failed?: number; skipped?: nu const skipped = counts.skipped ?? 
0; if (passed > 0 || (failed === 0 && skipped === 0)) { - parts.push(chalk.green(`${String(passed)} passed`)); + parts.push(pc.green(`${String(passed)} passed`)); } if (failed > 0 || (passed === 0 && skipped === 0)) { - parts.push(chalk.red(`${String(failed)} failed`)); + parts.push(pc.red(`${String(failed)} failed`)); } if (skipped > 0) { - parts.push(chalk.dim(`${String(skipped)} skipped`)); + parts.push(pc.dim(`${String(skipped)} skipped`)); } - console.log(`${INDENT.section}${parts.join(chalk.dim(` ${ICONS.dot} `))}`); + console.log(`${INDENT.section}${parts.join(pc.dim(` ${ICONS.dot} `))}`); } export function timing(ms: number): string { - return chalk.dim(`(${String(Math.round(ms))}ms)`); + return pc.dim(`(${String(Math.round(ms))}ms)`); } export function list(items: string[]): void { for (const item of items) { - console.log(`${INDENT.detail}${chalk.dim(ICONS.bullet)} ${item}`); + console.log(`${INDENT.detail}${pc.dim(ICONS.bullet)} ${item}`); } } export function check(passed: boolean, msg: string, skipped?: boolean): void { if (skipped) { - console.log(`${INDENT.section}${chalk.dim(ICONS.skip)} ${chalk.dim(msg)}`); + console.log(`${INDENT.section}${pc.dim(ICONS.skip)} ${pc.dim(msg)}`); return; } if (passed) { - console.log(`${INDENT.section}${chalk.green(ICONS.success)} ${msg}`); + console.log(`${INDENT.section}${pc.green(ICONS.success)} ${msg}`); } else { - console.log(`${INDENT.section}${chalk.red(ICONS.error)} ${chalk.red(msg)}`); + console.log(`${INDENT.section}${pc.red(ICONS.error)} ${pc.red(msg)}`); } } diff --git a/packages/cli/tests/bridges/claude.test.ts b/packages/cli/tests/bridges/claude.test.ts new file mode 100644 index 0000000..cc289fe --- /dev/null +++ b/packages/cli/tests/bridges/claude.test.ts @@ -0,0 +1,200 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { claudeBridge } from '../../src/bridges/claude.js'; +import type { Rule, ProjectConfig } from '../../src/bridges/types.js'; + +function 
makeRule(overrides: Partial = {}): Rule { + return { + id: 'test-rule', + scope: 'architecture', + severity: 'error', + content: 'Test content', + enabled: true, + ...overrides, + }; +} + +const CONFIG: ProjectConfig = { + version: '0.1', + project: { name: 'test' }, + tools: ['claude'], + mode: 'copy', + blocks: [], + pulled: [], + assets: [], + global: true, +}; + +describe('claudeBridge', () => { + it('has correct id', () => { + assert.equal(claudeBridge.id, 'claude'); + }); + + it('has kind directory', () => { + assert.equal(claudeBridge.kind, 'directory'); + }); + + it('has correct output directory config', () => { + assert.equal(claudeBridge.outputDir, '.claude/rules'); + assert.equal(claudeBridge.filePrefix, 'dwf-'); + assert.equal(claudeBridge.fileExtension, '.md'); + }); + + it('generates one file per scope', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Use kebab-case.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + + assert.equal(output.size, 2); + assert.ok(output.has('.claude/rules/dwf-architecture.md')); + assert.ok(output.has('.claude/rules/dwf-conventions.md')); + }); + + it('includes generated comment in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-architecture.md') ?? ''; + + assert.ok(content.includes('')); + }); + + it('includes scope heading in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Use kebab-case.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + + const archContent = output.get('.claude/rules/dwf-architecture.md') ?? 
''; + assert.ok(archContent.includes('# Architecture')); + assert.ok(archContent.includes('- Use named exports.')); + + const convContent = output.get('.claude/rules/dwf-conventions.md') ?? ''; + assert.ok(convContent.includes('# Conventions')); + assert.ok(convContent.includes('- Use kebab-case.')); + }); + + it('does not include frontmatter when no metadata paths', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-architecture.md') ?? ''; + + assert.ok(!content.includes('---')); + assert.ok(!content.includes('paths:')); + }); + + it('includes paths frontmatter when metadata has paths', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { paths: ['src/'] }, + }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-conventions.md') ?? ''; + + assert.ok(content.includes('---')); + assert.ok(content.includes('paths:')); + assert.ok(content.includes(' - "src/"')); + }); + + it('handles scope with colon (custom scope)', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'team:payments', content: 'No raw SQL.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + + assert.ok(output.has('.claude/rules/dwf-team-payments.md')); + const content = output.get('.claude/rules/dwf-team-payments.md') ?? ''; + assert.ok(content.includes('# team:payments')); + assert.ok(content.includes('- No raw SQL.')); + }); + + it('filters out info and disabled rules', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Keep this.' }), + makeRule({ id: 'rule-b', scope: 'architecture', severity: 'info', content: 'Skip info.' }), + makeRule({ id: 'rule-c', scope: 'architecture', enabled: false, content: 'Skip disabled.' 
}), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-architecture.md') ?? ''; + + assert.ok(content.includes('Keep this.')); + assert.ok(!content.includes('Skip info.')); + assert.ok(!content.includes('Skip disabled.')); + }); + + it('returns empty Map when no rules are enabled', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', enabled: false, content: 'Disabled.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + + assert.equal(output.size, 0); + }); + + it('sorts scopes: built-in first, then custom alphabetically', () => { + const rules = [ + makeRule({ id: 'rule-z', scope: 'team:payments', content: 'No raw SQL.' }), + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Kebab case.' }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const keys = [...output.keys()]; + + assert.equal(keys[0], '.claude/rules/dwf-architecture.md'); + assert.equal(keys[1], '.claude/rules/dwf-conventions.md'); + assert.equal(keys[2], '.claude/rules/dwf-team-payments.md'); + }); + + it('handles multiple paths in metadata', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { paths: ['src/', 'lib/'] }, + }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-conventions.md') ?? ''; + + assert.ok(content.includes(' - "src/"')); + assert.ok(content.includes(' - "lib/"')); + }); + + it('ignores empty paths array in metadata', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'architecture', + content: 'Content.', + metadata: { paths: [] }, + }), + ]; + + const output = claudeBridge.compile(rules, CONFIG); + const content = output.get('.claude/rules/dwf-architecture.md') ?? 
''; + + assert.ok(!content.includes('---')); + assert.ok(!content.includes('paths:')); + }); +}); diff --git a/packages/cli/tests/bridges/copilot.test.ts b/packages/cli/tests/bridges/copilot.test.ts index 79e8085..2b04333 100644 --- a/packages/cli/tests/bridges/copilot.test.ts +++ b/packages/cli/tests/bridges/copilot.test.ts @@ -1,6 +1,7 @@ import { describe, it } from 'node:test'; import assert from 'node:assert/strict'; import { copilotBridge } from '../../src/bridges/copilot.js'; +import { isMarkerBridge, isDirectoryBridge } from '../../src/bridges/types.js'; import type { Rule, ProjectConfig } from '../../src/bridges/types.js'; function makeRule(overrides: Partial = {}): Rule { @@ -22,6 +23,7 @@ const CONFIG: ProjectConfig = { blocks: [], pulled: [], assets: [], + global: true, }; describe('copilotBridge', () => { @@ -29,6 +31,10 @@ describe('copilotBridge', () => { assert.equal(copilotBridge.id, 'copilot'); }); + it('has kind marker', () => { + assert.equal(copilotBridge.kind, 'marker'); + }); + it('has correct output path', () => { assert.deepEqual(copilotBridge.outputPaths, ['.github/copilot-instructions.md']); }); @@ -84,6 +90,11 @@ describe('copilotBridge', () => { assert.ok(!content.includes('## Team:payments')); }); + it('is identified as MarkerBridge by type guard', () => { + assert.equal(isMarkerBridge(copilotBridge), true); + assert.equal(isDirectoryBridge(copilotBridge), false); + }); + it('filters out info and disabled rules', () => { const rules = [ makeRule({ id: 'rule-a', scope: 'architecture', content: 'Keep this.' 
}), diff --git a/packages/cli/tests/bridges/cursor.test.ts b/packages/cli/tests/bridges/cursor.test.ts new file mode 100644 index 0000000..66ca3c7 --- /dev/null +++ b/packages/cli/tests/bridges/cursor.test.ts @@ -0,0 +1,189 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { cursorBridge } from '../../src/bridges/cursor.js'; +import type { Rule, ProjectConfig } from '../../src/bridges/types.js'; + +function makeRule(overrides: Partial = {}): Rule { + return { + id: 'test-rule', + scope: 'architecture', + severity: 'error', + content: 'Test content', + enabled: true, + ...overrides, + }; +} + +const CONFIG: ProjectConfig = { + version: '0.1', + project: { name: 'test' }, + tools: ['cursor'], + mode: 'copy', + blocks: [], + pulled: [], + assets: [], + global: true, +}; + +describe('cursorBridge', () => { + it('has correct id', () => { + assert.equal(cursorBridge.id, 'cursor'); + }); + + it('has kind directory', () => { + assert.equal(cursorBridge.kind, 'directory'); + }); + + it('has correct output directory config', () => { + assert.equal(cursorBridge.outputDir, '.cursor/rules'); + assert.equal(cursorBridge.filePrefix, 'dwf-'); + assert.equal(cursorBridge.fileExtension, '.mdc'); + }); + + it('generates one file per scope', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Use kebab-case.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + + assert.equal(output.size, 2); + assert.ok(output.has('.cursor/rules/dwf-architecture.mdc')); + assert.ok(output.has('.cursor/rules/dwf-conventions.mdc')); + }); + + it('includes generated comment in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-architecture.mdc') ?? 
''; + + assert.ok(content.includes('')); + }); + + it('includes default frontmatter with alwaysApply: true when no globs', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-architecture.mdc') ?? ''; + + assert.ok(content.includes('---')); + assert.ok(content.includes('description: Architecture rules generated by dev-workflows')); + assert.ok(content.includes('alwaysApply: true')); + assert.ok(content.includes('globs:')); + }); + + it('includes globs frontmatter and alwaysApply: false when globs specified', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { globs: ['**/*.ts', '**/*.tsx'] }, + }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-conventions.mdc') ?? ''; + + assert.ok(content.includes('globs: ["**/*.ts", "**/*.tsx"]')); + assert.ok(content.includes('alwaysApply: false')); + }); + + it('includes scope heading in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-architecture.mdc') ?? ''; + + assert.ok(content.includes('# Architecture')); + assert.ok(content.includes('- Use named exports.')); + }); + + it('handles scope with colon (custom scope)', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'team:payments', content: 'No raw SQL.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + + assert.ok(output.has('.cursor/rules/dwf-team-payments.mdc')); + const content = output.get('.cursor/rules/dwf-team-payments.mdc') ?? 
''; + assert.ok(content.includes('# team:payments')); + assert.ok(content.includes('- No raw SQL.')); + }); + + it('filters out info and disabled rules', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Keep this.' }), + makeRule({ id: 'rule-b', scope: 'architecture', severity: 'info', content: 'Skip info.' }), + makeRule({ id: 'rule-c', scope: 'architecture', enabled: false, content: 'Skip disabled.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-architecture.mdc') ?? ''; + + assert.ok(content.includes('Keep this.')); + assert.ok(!content.includes('Skip info.')); + assert.ok(!content.includes('Skip disabled.')); + }); + + it('returns empty Map when no rules are enabled', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', enabled: false, content: 'Disabled.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + + assert.equal(output.size, 0); + }); + + it('sorts scopes: built-in first, then custom alphabetically', () => { + const rules = [ + makeRule({ id: 'rule-z', scope: 'team:payments', content: 'No raw SQL.' }), + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Kebab case.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const keys = [...output.keys()]; + + assert.equal(keys[0], '.cursor/rules/dwf-architecture.mdc'); + assert.equal(keys[1], '.cursor/rules/dwf-conventions.mdc'); + assert.equal(keys[2], '.cursor/rules/dwf-team-payments.mdc'); + }); + + it('generates correct description for custom scopes', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'team:payments', content: 'No raw SQL.' }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-team-payments.mdc') ?? 
''; + + assert.ok(content.includes('description: team:payments rules generated by dev-workflows')); + }); + + it('ignores empty globs array in metadata', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'architecture', + content: 'Content.', + metadata: { globs: [] }, + }), + ]; + + const output = cursorBridge.compile(rules, CONFIG); + const content = output.get('.cursor/rules/dwf-architecture.mdc') ?? ''; + + assert.ok(content.includes('alwaysApply: true')); + }); +}); diff --git a/packages/cli/tests/bridges/gemini.test.ts b/packages/cli/tests/bridges/gemini.test.ts new file mode 100644 index 0000000..02999b7 --- /dev/null +++ b/packages/cli/tests/bridges/gemini.test.ts @@ -0,0 +1,112 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { geminiBridge } from '../../src/bridges/gemini.js'; +import { isMarkerBridge, isDirectoryBridge } from '../../src/bridges/types.js'; +import type { Rule, ProjectConfig } from '../../src/bridges/types.js'; + +function makeRule(overrides: Partial = {}): Rule { + return { + id: 'test-rule', + scope: 'architecture', + severity: 'error', + content: 'Test content', + enabled: true, + ...overrides, + }; +} + +const CONFIG: ProjectConfig = { + version: '0.1', + project: { name: 'test' }, + tools: ['gemini'], + mode: 'copy', + blocks: [], + pulled: [], + assets: [], + global: true, +}; + +describe('geminiBridge', () => { + it('has correct id', () => { + assert.equal(geminiBridge.id, 'gemini'); + }); + + it('has kind marker', () => { + assert.equal(geminiBridge.kind, 'marker'); + }); + + it('has correct output path', () => { + assert.deepEqual(geminiBridge.outputPaths, ['GEMINI.md']); + }); + + it('uses markers', () => { + assert.equal(geminiBridge.usesMarkers, true); + }); + + it('is identified as MarkerBridge by type guard', () => { + assert.equal(isMarkerBridge(geminiBridge), true); + assert.equal(isDirectoryBridge(geminiBridge), false); + }); + + it('generates correct markdown 
output', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Use kebab-case.' }), + ]; + + const output = geminiBridge.compile(rules, CONFIG); + const content = output.get('GEMINI.md'); + + assert.ok(content); + assert.ok(content.includes('# Project Rules')); + assert.ok(content.includes('## Architecture')); + assert.ok(content.includes('- Use named exports.')); + assert.ok(content.includes('## Conventions')); + assert.ok(content.includes('- Use kebab-case.')); + }); + + it('sorts scopes: built-in first, then custom alphabetically', () => { + const rules = [ + makeRule({ id: 'rule-z', scope: 'team:payments', content: 'No raw SQL.' }), + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Named exports.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Kebab case.' }), + ]; + + const output = geminiBridge.compile(rules, CONFIG); + const content = output.get('GEMINI.md') ?? ''; + + const archIndex = content.indexOf('## Architecture'); + const convIndex = content.indexOf('## Conventions'); + const teamIndex = content.indexOf('## team:payments'); + + assert.ok(archIndex < convIndex, 'Architecture should come before Conventions'); + assert.ok(convIndex < teamIndex, 'Conventions should come before team:payments'); + }); + + it('renders custom scopes without capitalization', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'team:payments', content: 'No raw SQL.' }), + ]; + + const output = geminiBridge.compile(rules, CONFIG); + const content = output.get('GEMINI.md') ?? ''; + + assert.ok(content.includes('## team:payments')); + assert.ok(!content.includes('## Team:payments')); + }); + + it('filters out info and disabled rules', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Keep this.' }), + makeRule({ id: 'rule-b', scope: 'architecture', severity: 'info', content: 'Skip info.' 
}), + makeRule({ id: 'rule-c', scope: 'architecture', enabled: false, content: 'Skip disabled.' }), + ]; + + const output = geminiBridge.compile(rules, CONFIG); + const content = output.get('GEMINI.md') ?? ''; + + assert.ok(content.includes('Keep this.')); + assert.ok(!content.includes('Skip info.')); + assert.ok(!content.includes('Skip disabled.')); + }); +}); diff --git a/packages/cli/tests/bridges/windsurf.test.ts b/packages/cli/tests/bridges/windsurf.test.ts index 2c7d239..0327ba6 100644 --- a/packages/cli/tests/bridges/windsurf.test.ts +++ b/packages/cli/tests/bridges/windsurf.test.ts @@ -22,6 +22,7 @@ const CONFIG: ProjectConfig = { blocks: [], pulled: [], assets: [], + global: true, }; describe('windsurfBridge', () => { @@ -29,63 +30,142 @@ describe('windsurfBridge', () => { assert.equal(windsurfBridge.id, 'windsurf'); }); - it('has correct output path', () => { - assert.deepEqual(windsurfBridge.outputPaths, ['.windsurf/rules/devworkflows.md']); + it('has kind directory', () => { + assert.equal(windsurfBridge.kind, 'directory'); }); - it('does not use markers', () => { - assert.equal(windsurfBridge.usesMarkers, false); + it('has correct output directory config', () => { + assert.equal(windsurfBridge.outputDir, '.windsurf/rules'); + assert.equal(windsurfBridge.filePrefix, 'dwf-'); + assert.equal(windsurfBridge.fileExtension, '.md'); }); - it('generates correct markdown output', () => { + it('generates one file per scope', () => { const rules = [ makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), makeRule({ id: 'rule-b', scope: 'conventions', content: 'Use kebab-case.' 
}), ]; const output = windsurfBridge.compile(rules, CONFIG); - const content = output.get('.windsurf/rules/devworkflows.md'); - assert.ok(content); + assert.equal(output.size, 2); + assert.ok(output.has('.windsurf/rules/dwf-architecture.md')); + assert.ok(output.has('.windsurf/rules/dwf-conventions.md')); + }); + + it('includes generated comment in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const content = output.get('.windsurf/rules/dwf-architecture.md') ?? ''; + assert.ok(content.includes('')); - assert.ok(content.includes('# Project Rules')); - assert.ok(content.includes('## Architecture')); - assert.ok(content.includes('- Use named exports.')); - assert.ok(content.includes('## Conventions')); - assert.ok(content.includes('- Use kebab-case.')); }); - it('sorts scopes: built-in first, then custom alphabetically', () => { + it('includes default frontmatter with trigger: always', () => { const rules = [ - makeRule({ id: 'rule-z', scope: 'team:payments', content: 'No raw SQL.' }), - makeRule({ id: 'rule-a', scope: 'architecture', content: 'Named exports.' }), - makeRule({ id: 'rule-c', scope: 'agent:reviewer', content: 'Review carefully.' }), - makeRule({ id: 'rule-b', scope: 'conventions', content: 'Kebab case.' }), + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const content = output.get('.windsurf/rules/dwf-architecture.md') ?? 
''; + + assert.ok(content.includes('---')); + assert.ok(content.includes('trigger: always')); + }); + + it('uses metadata trigger when specified', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { trigger: 'manual' }, + }), ]; const output = windsurfBridge.compile(rules, CONFIG); - const content = output.get('.windsurf/rules/devworkflows.md') ?? ''; + const content = output.get('.windsurf/rules/dwf-conventions.md') ?? ''; + + assert.ok(content.includes('trigger: manual')); + }); - const archIndex = content.indexOf('## Architecture'); - const convIndex = content.indexOf('## Conventions'); - const agentIndex = content.indexOf('## agent:reviewer'); - const teamIndex = content.indexOf('## team:payments'); + it('includes globs when trigger is glob and globs are provided', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { trigger: 'glob', globs: ['**/*.ts', '**/*.tsx'] }, + }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const content = output.get('.windsurf/rules/dwf-conventions.md') ?? ''; - assert.ok(archIndex < convIndex, 'Architecture should come before Conventions'); - assert.ok(convIndex < agentIndex, 'Conventions should come before agent:reviewer'); - assert.ok(agentIndex < teamIndex, 'agent:reviewer should come before team:payments'); + assert.ok(content.includes('trigger: glob')); + assert.ok(content.includes('globs: ["**/*.ts", "**/*.tsx"]')); }); - it('renders custom scopes without capitalization', () => { + it('does not include globs when trigger is always even if globs present', () => { + const rules = [ + makeRule({ + id: 'rule-a', + scope: 'conventions', + content: 'Use kebab-case.', + metadata: { trigger: 'always', globs: ['**/*.ts'] }, + }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const content = output.get('.windsurf/rules/dwf-conventions.md') ?? 
''; + + assert.ok(content.includes('trigger: always')); + assert.ok(!content.includes('globs:')); + }); + + it('includes scope heading in each file', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Use named exports.' }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const content = output.get('.windsurf/rules/dwf-architecture.md') ?? ''; + + assert.ok(content.includes('# Architecture')); + assert.ok(content.includes('- Use named exports.')); + }); + + it('handles scope with colon (custom scope)', () => { const rules = [ makeRule({ id: 'rule-a', scope: 'team:payments', content: 'No raw SQL.' }), ]; const output = windsurfBridge.compile(rules, CONFIG); - const content = output.get('.windsurf/rules/devworkflows.md') ?? ''; - assert.ok(content.includes('## team:payments')); - assert.ok(!content.includes('## Team:payments')); + assert.ok(output.has('.windsurf/rules/dwf-team-payments.md')); + const content = output.get('.windsurf/rules/dwf-team-payments.md') ?? ''; + assert.ok(content.includes('# team:payments')); + assert.ok(content.includes('- No raw SQL.')); + }); + + it('sorts scopes: built-in first, then custom alphabetically', () => { + const rules = [ + makeRule({ id: 'rule-z', scope: 'team:payments', content: 'No raw SQL.' }), + makeRule({ id: 'rule-a', scope: 'architecture', content: 'Named exports.' }), + makeRule({ id: 'rule-c', scope: 'agent:reviewer', content: 'Review carefully.' }), + makeRule({ id: 'rule-b', scope: 'conventions', content: 'Kebab case.' 
}), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + const keys = [...output.keys()]; + + assert.equal(keys[0], '.windsurf/rules/dwf-architecture.md'); + assert.equal(keys[1], '.windsurf/rules/dwf-conventions.md'); + assert.equal(keys[2], '.windsurf/rules/dwf-agent-reviewer.md'); + assert.equal(keys[3], '.windsurf/rules/dwf-team-payments.md'); }); it('filters out info and disabled rules', () => { @@ -96,10 +176,20 @@ describe('windsurfBridge', () => { ]; const output = windsurfBridge.compile(rules, CONFIG); - const content = output.get('.windsurf/rules/devworkflows.md') ?? ''; + const content = output.get('.windsurf/rules/dwf-architecture.md') ?? ''; assert.ok(content.includes('Keep this.')); assert.ok(!content.includes('Skip info.')); assert.ok(!content.includes('Skip disabled.')); }); + + it('returns empty Map when no rules are enabled', () => { + const rules = [ + makeRule({ id: 'rule-a', scope: 'architecture', enabled: false, content: 'Disabled.' }), + ]; + + const output = windsurfBridge.compile(rules, CONFIG); + + assert.equal(output.size, 0); + }); }); diff --git a/packages/cli/tests/commands/compile.test.ts b/packages/cli/tests/commands/compile.test.ts index dc5e642..4098556 100644 --- a/packages/cli/tests/commands/compile.test.ts +++ b/packages/cli/tests/commands/compile.test.ts @@ -1,6 +1,6 @@ import { describe, it, beforeEach, afterEach } from 'node:test'; import assert from 'node:assert/strict'; -import { mkdtemp, mkdir, writeFile, rm, readFile, access } from 'node:fs/promises'; +import { mkdtemp, mkdir, writeFile, rm, readFile, readdir, access } from 'node:fs/promises'; import { join } from 'node:path'; import { tmpdir } from 'node:os'; import { executePipeline } from '../../src/commands/compile.js'; @@ -15,6 +15,25 @@ mode: copy blocks: [] `; +const COPILOT_CONFIG = `version: "0.1" +project: + name: "test-project" +tools: + - copilot +mode: copy +blocks: [] +`; + +const MIXED_CONFIG = `version: "0.1" +project: + name: "test-project" 
+tools: + - claude + - copilot +mode: copy +blocks: [] +`; + const VALID_RULES = `scope: conventions rules: - id: named-exports @@ -25,6 +44,26 @@ rules: content: Avoid barrel files. `; +const SECURITY_RULES = `scope: security +rules: + - id: no-eval + severity: error + content: Never use eval(). +`; + +const RULES_WITH_METADATA = `scope: conventions +globs: + - "**/*.ts" + - "**/*.tsx" +paths: + - "src/" +trigger: always +rules: + - id: named-exports + severity: error + content: Always use named exports. +`; + async function fileExists(filePath: string): Promise { try { await access(filePath); @@ -34,11 +73,13 @@ async function fileExists(filePath: string): Promise { } } -async function setupProject(tmpDir: string, config?: string, rules?: string): Promise { +async function setupProject(tmpDir: string, config?: string, ruleFiles?: Record): Promise { await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); await writeFile(join(tmpDir, '.dwf', 'config.yml'), config ?? VALID_CONFIG); - if (rules !== undefined) { - await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), rules); + if (ruleFiles) { + for (const [name, content] of Object.entries(ruleFiles)) { + await writeFile(join(tmpDir, '.dwf', 'rules', name), content); + } } } @@ -54,7 +95,7 @@ describe('executePipeline', () => { }); it('returns success results for all configured bridges', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); const result = await executePipeline({ cwd: tmpDir }); @@ -65,32 +106,52 @@ describe('executePipeline', () => { const claudeResult = result.results.find((r) => r.bridgeId === 'claude'); assert.ok(claudeResult); assert.equal(claudeResult.success, true); - assert.ok(await fileExists(join(tmpDir, 'CLAUDE.md'))); + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); const cursorResult = result.results.find((r) => r.bridgeId === 
'cursor'); assert.ok(cursorResult); assert.equal(cursorResult.success, true); }); - it('tool option filters to single bridge', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + it('tool option filters bridge but still includes canonical outputs', async () => { + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); const result = await executePipeline({ cwd: tmpDir, tool: 'claude' }); const bridgeIds = new Set(result.results.map((r) => r.bridgeId)); - assert.equal(bridgeIds.size, 1); + assert.equal(bridgeIds.size, 2); assert.ok(bridgeIds.has('claude')); + assert.ok(bridgeIds.has('canonical')); + }); + + it('keeps bridge outputs when canonical write fails', async () => { + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); + + await mkdir(join(tmpDir, '.agents', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.agents', 'rules', 'devw'), 'blocking file', 'utf-8'); + + const result = await executePipeline({ cwd: tmpDir, tool: 'claude' }); + + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); + assert.ok(result.canonicalError); + + const claudeResults = result.results.filter((r) => r.bridgeId === 'claude'); + const canonicalResults = result.results.filter((r) => r.bridgeId === 'canonical'); + + assert.ok(claudeResults.every((r) => r.success)); + assert.ok(canonicalResults.length > 0); + assert.ok(canonicalResults.every((r) => !r.success)); }); it('throws on invalid tool filter', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); await assert.rejects( () => executePipeline({ cwd: tmpDir, tool: 'noexiste' }), (err: Error) => { assert.ok(err.message.includes('not configured')); return true; - } + }, ); }); @@ -100,7 +161,7 @@ describe('executePipeline', () => { (err: Error) => { assert.ok(err.message.length > 0); return true; - } + }, ); }); @@ 
-109,12 +170,12 @@ describe('executePipeline', () => { await writeFile(join(tmpDir, '.dwf', 'config.yml'), ':\ninvalid: [yaml: {broken'); await assert.rejects( - () => executePipeline({ cwd: tmpDir }) + () => executePipeline({ cwd: tmpDir }), ); }); it('write: false returns content without writing files', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); const result = await executePipeline({ cwd: tmpDir, tool: 'claude', write: false }); @@ -124,11 +185,11 @@ describe('executePipeline', () => { assert.ok(claudeResult.content); assert.ok(claudeResult.content.includes('named exports')); - assert.ok(!(await fileExists(join(tmpDir, 'CLAUDE.md')))); + assert.ok(!(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md')))); }); it('writes hash file on successful compile', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); await executePipeline({ cwd: tmpDir }); @@ -139,7 +200,7 @@ describe('executePipeline', () => { }); it('does not write hash when write is false', async () => { - await setupProject(tmpDir, VALID_CONFIG, VALID_RULES); + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); await executePipeline({ cwd: tmpDir, write: false }); @@ -147,3 +208,501 @@ describe('executePipeline', () => { assert.ok(!(await fileExists(hashPath))); }); }); + +describe('executePipeline DirectoryBridge multi-file output', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-dir-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('generates multiple files for multiple scopes', async () => { + await setupProject(tmpDir, VALID_CONFIG, { + 'conventions.yml': VALID_RULES, + 'security.yml': SECURITY_RULES, + }); + + const result = await 
executePipeline({ cwd: tmpDir, tool: 'claude' }); + + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-security.md'))); + assert.ok(await fileExists(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-conventions.md'))); + assert.ok(await fileExists(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-security.md'))); + + const claudeResults = result.results.filter((r) => r.bridgeId === 'claude'); + assert.equal(claudeResults.length, 2); + assert.equal(result.canonicalFileCount, 2); + }); + + it('creates output directories automatically', async () => { + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); + + // .claude/rules/ does not exist yet + assert.ok(!(await fileExists(join(tmpDir, '.claude', 'rules')))); + + await executePipeline({ cwd: tmpDir, tool: 'claude' }); + + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); + }); + + it('generates correct frontmatter with scope metadata', async () => { + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': RULES_WITH_METADATA }); + + await executePipeline({ cwd: tmpDir, tool: 'claude' }); + + const content = await readFile(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), 'utf-8'); + assert.ok(content.includes('paths:')); + assert.ok(content.includes('"src/"')); + + const canonicalContent = await readFile(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-conventions.md'), 'utf-8'); + assert.ok(canonicalContent.startsWith('')); + assert.ok(!canonicalContent.startsWith('---')); + assert.ok(!canonicalContent.includes('paths:')); + }); +}); + +describe('executePipeline MarkerBridge output', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-marker-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('generates marker-based output for 
MarkerBridge', async () => { + await setupProject(tmpDir, COPILOT_CONFIG, { 'conventions.yml': VALID_RULES }); + + const result = await executePipeline({ cwd: tmpDir }); + + const copilotResult = result.results.find((r) => r.bridgeId === 'copilot'); + assert.ok(copilotResult); + assert.equal(copilotResult.success, true); + + const content = await readFile(join(tmpDir, '.github', 'copilot-instructions.md'), 'utf-8'); + assert.ok(content.includes('<!-- BEGIN dev-workflows -->')); + assert.ok(content.includes('<!-- END dev-workflows -->')); + assert.ok(content.includes('Always use named exports.')); + }); + + it('preserves existing content outside markers', async () => { + await setupProject(tmpDir, COPILOT_CONFIG, { 'conventions.yml': VALID_RULES }); + + // Pre-populate the file with user content + await mkdir(join(tmpDir, '.github'), { recursive: true }); + await writeFile( + join(tmpDir, '.github', 'copilot-instructions.md'), + '# My Custom Rules\n\nDo not touch this.\n', + 'utf-8', + ); + + await executePipeline({ cwd: tmpDir }); + + const content = await readFile(join(tmpDir, '.github', 'copilot-instructions.md'), 'utf-8'); + assert.ok(content.includes('# My Custom Rules')); + assert.ok(content.includes('Do not touch this.')); + assert.ok(content.includes('<!-- BEGIN dev-workflows -->')); + }); +}); + +describe('executePipeline mixed bridges', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-mixed-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('handles both DirectoryBridge and MarkerBridge in same run', async () => { + await setupProject(tmpDir, MIXED_CONFIG, { 'conventions.yml': VALID_RULES }); + + const result = await executePipeline({ cwd: tmpDir }); + + // Claude (DirectoryBridge) should write to .claude/rules/ + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); + + // Copilot (MarkerBridge) should write to .github/copilot-instructions.md + assert.ok(await fileExists(join(tmpDir, 
'.github', 'copilot-instructions.md'))); + + const claudeResults = result.results.filter((r) => r.bridgeId === 'claude'); + const copilotResults = result.results.filter((r) => r.bridgeId === 'copilot'); + + assert.ok(claudeResults.length > 0); + assert.ok(copilotResults.length > 0); + assert.ok(claudeResults.every((r) => r.success)); + assert.ok(copilotResults.every((r) => r.success)); + }); +}); + +describe('executePipeline stale file cleanup', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-stale-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('removes orphaned dwf- files from previous compile', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + // Pre-populate stale file + await mkdir(join(tmpDir, '.claude', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.claude', 'rules', 'dwf-testing.md'), 'old content'); + await mkdir(join(tmpDir, '.agents', 'rules', 'devw'), { recursive: true }); + await writeFile(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-testing.md'), 'old content'); + + const result = await executePipeline({ cwd: tmpDir }); + + // New file should exist + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'))); + // Stale file should be removed + assert.ok(!(await fileExists(join(tmpDir, '.claude', 'rules', 'dwf-testing.md')))); + assert.ok(!(await fileExists(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-testing.md')))); + + // Should report stale files + assert.ok(result.staleResults.length > 0); + const claudeStale = result.staleResults.find((s) => s.bridgeId === 'claude'); + assert.ok(claudeStale); + assert.ok(claudeStale.deleted.includes('dwf-testing.md')); + }); + + it('does not touch non-dwf files', async () 
=> { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + // Pre-populate a user file + await mkdir(join(tmpDir, '.claude', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.claude', 'rules', 'my-custom-rule.md'), 'user content'); + + await executePipeline({ cwd: tmpDir }); + + // User file should still exist + assert.ok(await fileExists(join(tmpDir, '.claude', 'rules', 'my-custom-rule.md'))); + const userContent = await readFile(join(tmpDir, '.claude', 'rules', 'my-custom-rule.md'), 'utf-8'); + assert.equal(userContent, 'user content'); + }); + + it('cleans all dwf- files when no active rules', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + const disabledRules = `scope: conventions +rules: + - id: named-exports + severity: error + content: Always use named exports. 
+ enabled: false +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': disabledRules }); + + // Pre-populate old generated files + await mkdir(join(tmpDir, '.claude', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), 'old content'); + + await executePipeline({ cwd: tmpDir }); + + // Should be cleaned up + const remaining = await readdir(join(tmpDir, '.claude', 'rules')); + assert.ok(!remaining.includes('dwf-conventions.md')); + }); +}); + +describe('executePipeline legacy migration', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-legacy-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('migrates legacy files on first v2 compile', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + // Create legacy files + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'old cursor content'); + + const result = await executePipeline({ cwd: tmpDir }); + + // Legacy file should be removed + assert.ok(!(await fileExists(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc')))); + // Migration actions should be reported + assert.ok(result.migration.actions.length > 0); + }); + + it('removes legacy marker block from CLAUDE.md', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + // Create CLAUDE.md with legacy markers + await writeFile( + join(tmpDir, 'CLAUDE.md'), + '# Notes\n\n<!-- BEGIN dev-workflows -->\n# Rules\n<!-- END dev-workflows -->\n\n# More', + 'utf-8', + ); + + const result = await executePipeline({ 
cwd: tmpDir }); + + // CLAUDE.md should still exist but without markers + const content = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); + assert.ok(content.includes('# Notes')); + assert.ok(content.includes('# More')); + assert.ok(!content.includes('BEGIN dev-workflows')); + assert.ok(result.migration.actions.length > 0); + }); + + it('migration is idempotent when no legacy files exist', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + const result = await executePipeline({ cwd: tmpDir }); + + assert.equal(result.migration.actions.length, 0); + }); + + it('does not migrate in dry-run mode', async () => { + const claudeOnlyConfig = `version: "0.1" +project: + name: "test-project" +tools: + - claude +mode: copy +blocks: [] +`; + + await setupProject(tmpDir, claudeOnlyConfig, { 'conventions.yml': VALID_RULES }); + + // Create legacy file + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'old cursor content'); + + const result = await executePipeline({ cwd: tmpDir, write: false }); + + // Legacy file should NOT be removed in dry-run + assert.ok(await fileExists(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'))); + assert.equal(result.migration.actions.length, 0); + }); +}); + +describe('executePipeline dry-run', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-dry-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('shows files for DirectoryBridge without writing', async () => { + await setupProject(tmpDir, VALID_CONFIG, { 'conventions.yml': VALID_RULES }); + + const result = await executePipeline({ cwd: tmpDir, write: false }); + + const claudeResults = 
result.results.filter((r) => r.bridgeId === 'claude'); + const canonicalResults = result.results.filter((r) => r.bridgeId === 'canonical'); + assert.ok(claudeResults.length > 0); + assert.ok(canonicalResults.length > 0); + for (const r of claudeResults) { + assert.ok(r.content); + assert.ok(r.outputPath.includes('.claude/rules/')); + } + for (const r of canonicalResults) { + assert.ok(r.content); + assert.ok(r.outputPath.includes('.agents/rules/devw/')); + } + + // No files should be written + assert.ok(!(await fileExists(join(tmpDir, '.claude', 'rules')))); + assert.ok(!(await fileExists(join(tmpDir, '.agents', 'rules', 'devw')))); + }); + + it('shows files for MarkerBridge without writing', async () => { + await setupProject(tmpDir, COPILOT_CONFIG, { 'conventions.yml': VALID_RULES }); + + const result = await executePipeline({ cwd: tmpDir, write: false }); + + const copilotResults = result.results.filter((r) => r.bridgeId === 'copilot'); + assert.ok(copilotResults.length > 0); + for (const r of copilotResults) { + assert.ok(r.content); + } + + assert.ok(!(await fileExists(join(tmpDir, '.github', 'copilot-instructions.md')))); + }); +}); + +describe('executePipeline global scope integration', () => { + let tmpDir: string; + let previousHome: string | undefined; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-compile-global-')); + previousHome = process.env['HOME']; + }); + + afterEach(async () => { + if (previousHome === undefined) { + delete process.env['HOME']; + } else { + process.env['HOME'] = previousHome; + } + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('ignores global rules when project config has global: false', async () => { + const fakeHome = join(tmpDir, 'home'); + process.env['HOME'] = fakeHome; + + const projectDir = join(tmpDir, 'project'); + await setupProject( + projectDir, + `version: "0.2" +project: + name: "test-project" +tools: + - claude +mode: copy +global: false +blocks: [] +`, + { + 
'conventions.yml': `scope: conventions +rules: + - id: project-rule + severity: error + content: Project rule content. +`, + }, + ); + + await mkdir(join(fakeHome, '.dwf', 'rules'), { recursive: true }); + await writeFile( + join(fakeHome, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: global-rule + severity: error + content: Global rule content. +`, + 'utf-8', + ); + + const result = await executePipeline({ cwd: projectDir, tool: 'claude' }); + + assert.equal(result.globalRuleCount, 0); + assert.equal(result.projectRuleCount, 1); + assert.equal(result.activeRuleCount, 1); + + const compiled = await readFile(join(projectDir, '.claude', 'rules', 'dwf-conventions.md'), 'utf-8'); + assert.ok(compiled.includes('Project rule content.')); + assert.ok(!compiled.includes('Global rule content.')); + }); + + it('writes native and canonical outputs to home directories in global mode', async () => { + const fakeHome = join(tmpDir, 'home'); + process.env['HOME'] = fakeHome; + + const globalDwfDir = join(fakeHome, '.dwf'); + await mkdir(join(globalDwfDir, 'rules'), { recursive: true }); + await writeFile( + join(globalDwfDir, 'config.yml'), + `version: "0.2" +project: + name: "global" +tools: + - claude +mode: copy +global: true +blocks: [] +`, + 'utf-8', + ); + await writeFile( + join(globalDwfDir, 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: global-rule + severity: error + content: Home-level global rule. 
+`, + 'utf-8', + ); + + await mkdir(join(fakeHome, '.claude', 'rules'), { recursive: true }); + await writeFile(join(fakeHome, '.claude', 'rules', 'dwf-testing.md'), 'stale', 'utf-8'); + + const result = await executePipeline({ cwd: globalDwfDir, tool: 'claude' }); + + assert.ok(await fileExists(join(fakeHome, '.claude', 'rules', 'dwf-conventions.md'))); + assert.ok(await fileExists(join(fakeHome, '.agents', 'rules', 'devw', 'dwf-conventions.md'))); + assert.ok(!(await fileExists(join(globalDwfDir, '.claude', 'rules', 'dwf-conventions.md')))); + assert.ok(!(await fileExists(join(fakeHome, '.claude', 'rules', 'dwf-testing.md')))); + assert.ok(result.staleResults.some((entry) => entry.bridgeId === 'claude')); + }); +}); diff --git a/packages/cli/tests/commands/doctor.test.ts b/packages/cli/tests/commands/doctor.test.ts index 5fd373d..e2311cf 100644 --- a/packages/cli/tests/commands/doctor.test.ts +++ b/packages/cli/tests/commands/doctor.test.ts @@ -12,8 +12,14 @@ import { checkBridgesAvailable, checkSymlinks, checkHashSync, + checkCanonicalExists, + checkCanonicalSync, + checkLegacyMigration, + checkNativeFrontmatter, } from '../../src/commands/doctor.js'; import { computeRulesHash, writeHash } from '../../src/core/hash.js'; +import { executePipeline } from '../../src/commands/compile.js'; +import { readConfig, readRules } from '../../src/core/parser.js'; import type { Rule, ProjectConfig } from '../../src/bridges/types.js'; const VALID_CONFIG = `version: "0.1" @@ -214,6 +220,7 @@ blocks: [] blocks: [], pulled: [], assets: [], + global: true, }; const result = checkBridgesAvailable(config); @@ -229,6 +236,7 @@ blocks: [] blocks: [], pulled: [], assets: [], + global: true, }; const result = checkBridgesAvailable(config); @@ -244,6 +252,7 @@ blocks: [] blocks: [], pulled: [], assets: [], + global: true, }; const result = checkBridgesAvailable(config); @@ -262,6 +271,7 @@ blocks: [] blocks: [], pulled: [], assets: [], + global: true, }; const result = await 
checkSymlinks(tmpDir, config); @@ -273,18 +283,20 @@ blocks: [] const config: ProjectConfig = { version: '0.1', project: { name: 'test' }, - tools: ['claude'], + tools: ['copilot'], mode: 'link', blocks: [], pulled: [], assets: [], + global: true, }; // Create a target file and a symlink pointing to it - const targetPath = join(tmpDir, '.dwf', '.cache', 'CLAUDE.md'); - await mkdir(join(tmpDir, '.dwf', '.cache'), { recursive: true }); + await mkdir(join(tmpDir, '.dwf', '.cache', '.github'), { recursive: true }); + await mkdir(join(tmpDir, '.github'), { recursive: true }); + const targetPath = join(tmpDir, '.dwf', '.cache', '.github', 'copilot-instructions.md'); await writeFile(targetPath, 'content'); - await symlink(targetPath, join(tmpDir, 'CLAUDE.md')); + await symlink(targetPath, join(tmpDir, '.github', 'copilot-instructions.md')); const result = await checkSymlinks(tmpDir, config); assert.equal(result.passed, true); @@ -295,20 +307,22 @@ blocks: [] const config: ProjectConfig = { version: '0.1', project: { name: 'test' }, - tools: ['claude'], + tools: ['copilot'], mode: 'link', blocks: [], pulled: [], assets: [], + global: true, }; // Create a symlink pointing to a non-existent target - const brokenTarget = join(tmpDir, '.dwf', '.cache', 'CLAUDE.md'); - await symlink(brokenTarget, join(tmpDir, 'CLAUDE.md')); + await mkdir(join(tmpDir, '.github'), { recursive: true }); + const brokenTarget = join(tmpDir, '.dwf', '.cache', '.github', 'copilot-instructions.md'); + await symlink(brokenTarget, join(tmpDir, '.github', 'copilot-instructions.md')); const result = await checkSymlinks(tmpDir, config); assert.equal(result.passed, false); - assert.ok(result.message.includes('CLAUDE.md')); + assert.ok(result.message.includes('copilot-instructions.md')); }); }); @@ -339,6 +353,114 @@ blocks: [] assert.ok(result.message.includes('out of sync')); }); }); + + describe('checkCanonicalExists', () => { + it('fails when canonical directory does not exist', async () => { + const 
result = await checkCanonicalExists(tmpDir); + assert.equal(result.passed, false); + assert.ok(result.message.includes('.agents/rules/devw')); + }); + + it('passes when canonical files exist', async () => { + await mkdir(join(tmpDir, '.agents', 'rules', 'devw'), { recursive: true }); + await writeFile(join(tmpDir, '.agents', 'rules', 'devw', 'dwf-conventions.md'), 'content', 'utf-8'); + + const result = await checkCanonicalExists(tmpDir); + assert.equal(result.passed, true); + assert.ok(result.message.includes('1 file')); + }); + }); + + describe('checkCanonicalSync', () => { + it('passes when canonical and native files are aligned', async () => { + await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), VALID_CONFIG); + await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), VALID_RULES); + + await executePipeline({ cwd: tmpDir }); + + const config = await readConfig(tmpDir); + const rules = await readRules(tmpDir); + const result = await checkCanonicalSync(tmpDir, rules, config); + + assert.equal(result.passed, true); + assert.ok(result.message.includes('in sync')); + }); + + it('fails when native file was manually edited', async () => { + await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), VALID_CONFIG); + await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), VALID_RULES); + + await executePipeline({ cwd: tmpDir }); + await writeFile( + join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), + '\n# Conventions\n\n- Tampered content\n', + 'utf-8', + ); + + const config = await readConfig(tmpDir); + const rules = await readRules(tmpDir); + const result = await checkCanonicalSync(tmpDir, rules, config); + + assert.equal(result.passed, false); + assert.ok(result.message.includes('Canonical/native mismatch')); + }); + }); + + describe('checkLegacyMigration', () => { + it('passes when no legacy files are present', 
async () => { + const result = await checkLegacyMigration(tmpDir); + assert.equal(result.passed, true); + }); + + it('fails when legacy files still exist', async () => { + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'legacy', 'utf-8'); + + const result = await checkLegacyMigration(tmpDir); + assert.equal(result.passed, false); + assert.ok(result.message.includes('devworkflows.mdc')); + }); + }); + + describe('checkNativeFrontmatter', () => { + it('passes with valid native frontmatter', async () => { + await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), VALID_CONFIG); + await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), VALID_RULES); + + await executePipeline({ cwd: tmpDir }); + + const config = await readConfig(tmpDir); + const result = await checkNativeFrontmatter(tmpDir, config); + assert.equal(result.passed, true); + }); + + it('fails with invalid YAML frontmatter', async () => { + const config: ProjectConfig = { + version: '0.1', + project: { name: 'test' }, + tools: ['cursor'], + mode: 'copy', + blocks: [], + pulled: [], + assets: [], + global: true, + }; + + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile( + join(tmpDir, '.cursor', 'rules', 'dwf-conventions.mdc'), + '---\ndescription: broken: yaml\n---\n\n', + 'utf-8', + ); + + const result = await checkNativeFrontmatter(tmpDir, config); + assert.equal(result.passed, false); + assert.ok(result.message.includes('invalid YAML frontmatter')); + }); + }); }); describe('computeRulesHash', () => { diff --git a/packages/cli/tests/commands/explain.test.ts b/packages/cli/tests/commands/explain.test.ts index 8c66bc8..421e570 100644 --- a/packages/cli/tests/commands/explain.test.ts +++ b/packages/cli/tests/commands/explain.test.ts @@ -75,9 +75,9 @@ describe('devw explain', () => { 
assert.equal(result.exitCode, 0);
assert.ok(result.stdout.includes('claude'));
- assert.ok(result.stdout.includes('CLAUDE.md'));
+ assert.ok(result.stdout.includes('.claude/rules/dwf-'));
assert.ok(result.stdout.includes('Rules:'));
- assert.ok(result.stdout.includes('architecture:'));
+ assert.ok(result.stdout.includes('multi-file'));
});
it('shows excluded rules with reasons', async () => {
@@ -102,7 +102,7 @@ describe('devw explain', () => {
assert.equal(result.exitCode, 0);
assert.ok(result.stdout.includes('windsurf'));
- assert.ok(result.stdout.includes('.windsurf/rules/devworkflows.md'));
+ assert.ok(result.stdout.includes('.windsurf/rules/dwf-'));
assert.ok(!result.stdout.includes('claude'));
});
diff --git a/packages/cli/tests/commands/init.test.ts b/packages/cli/tests/commands/init.test.ts
new file mode 100644
index 0000000..f5438ca
--- /dev/null
+++ b/packages/cli/tests/commands/init.test.ts
@@ -0,0 +1,75 @@
+import { describe, it, beforeEach, afterEach } from 'node:test';
+import assert from 'node:assert/strict';
+import { mkdtemp, rm, readFile, access, mkdir } from 'node:fs/promises';
+import { join } from 'node:path';
+import { tmpdir } from 'node:os';
+import { runInit } from '../../src/commands/init.js';
+
+async function fileExists(filePath: string): Promise<boolean> {
+ try {
+ await access(filePath);
+ return true;
+ } catch {
+ return false;
+ }
+}
+
+describe('runInit', () => {
+ let tmpDir: string;
+ let projectDir: string;
+ let fakeHome: string;
+ let previousCwd: string;
+ let previousHome: string | undefined;
+
+ beforeEach(async () => {
+ tmpDir = await mkdtemp(join(tmpdir(), 'devw-init-'));
+ projectDir = join(tmpDir, 'project');
+ fakeHome = join(tmpDir, 'home');
+ previousCwd = process.cwd();
+ previousHome = process.env['HOME'];
+
+ await rm(projectDir, { recursive: true, force: true });
+ await rm(fakeHome, { recursive: true, force: true });
+ await Promise.all([
+ mkdir(projectDir, { recursive: true }),
+ mkdir(fakeHome, { recursive: true }),
+ ]); + + process.env['HOME'] = fakeHome; + process.chdir(projectDir); + process.exitCode = 0; + }); + + afterEach(async () => { + process.chdir(previousCwd); + if (previousHome === undefined) { + delete process.env['HOME']; + } else { + process.env['HOME'] = previousHome; + } + process.exitCode = 0; + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('initializes project mode by default with -y', async () => { + await runInit({ tools: 'claude', mode: 'copy', yes: true }); + + assert.ok(await fileExists(join(projectDir, '.dwf', 'config.yml'))); + assert.ok(await fileExists(join(projectDir, '.dwf', 'rules', 'conventions.yml'))); + assert.ok(!(await fileExists(join(fakeHome, '.dwf', 'config.yml')))); + }); + + it('initializes global mode with --global and creates canonical directory', async () => { + await runInit({ global: true, tools: 'claude', mode: 'copy', yes: true }); + + const globalConfigPath = join(fakeHome, '.dwf', 'config.yml'); + assert.ok(await fileExists(globalConfigPath)); + assert.ok(await fileExists(join(fakeHome, '.dwf', 'rules', 'conventions.yml'))); + assert.ok(await fileExists(join(fakeHome, '.agents', 'rules', 'devw'))); + assert.ok(!(await fileExists(join(projectDir, '.dwf', 'config.yml')))); + + const config = await readFile(globalConfigPath, 'utf-8'); + assert.ok(config.includes('version: "0.2"')); + assert.ok(config.includes('global: true')); + }); +}); diff --git a/packages/cli/tests/commands/menu.test.ts b/packages/cli/tests/commands/menu.test.ts index c37d89e..c1e9d48 100644 --- a/packages/cli/tests/commands/menu.test.ts +++ b/packages/cli/tests/commands/menu.test.ts @@ -15,10 +15,12 @@ function makeMockCommand(): { helpCalled: boolean; help: () => void } { describe('runMainMenu — TTY guard', () => { let originalStdoutIsTTY: boolean | undefined; let originalStdinIsTTY: boolean | undefined; + let originalCI: string | undefined; beforeEach(() => { originalStdoutIsTTY = process.stdout.isTTY; originalStdinIsTTY = 
process.stdin.isTTY;
+ originalCI = process.env['CI'];
});
afterEach(() => {
@@ -33,6 +35,12 @@ describe('runMainMenu — TTY guard', () => {
writable: true,
configurable: true,
});
+
+ if (originalCI === undefined) {
+ delete process.env['CI'];
+ } else {
+ process.env['CI'] = originalCI;
+ }
});
it('calls command.help() when stdout is not a TTY', async () => {
@@ -71,4 +79,23 @@ describe('runMainMenu — TTY guard', () => {
assert.equal(mockCommand.helpCalled, true);
});
+
+ it('calls command.help() when CI mode is enabled', async () => {
+ Object.defineProperty(process.stdout, 'isTTY', {
+ value: true,
+ writable: true,
+ configurable: true,
+ });
+ Object.defineProperty(process.stdin, 'isTTY', {
+ value: true,
+ writable: true,
+ configurable: true,
+ });
+ process.env['CI'] = 'true';
+
+ const mockCommand = makeMockCommand();
+ await runMainMenu(mockCommand as unknown as import('commander').Command);
+
+ assert.equal(mockCommand.helpCalled, true);
+ });
});
diff --git a/packages/cli/tests/core/assets.test.ts b/packages/cli/tests/core/assets.test.ts
index d2c02e5..80c290c 100644
--- a/packages/cli/tests/core/assets.test.ts
+++ b/packages/cli/tests/core/assets.test.ts
@@ -24,6 +24,7 @@ const CONFIG: ProjectConfig = {
blocks: [],
pulled: [],
assets: [],
+ global: true,
};
let tmpDir: string;
diff --git a/packages/cli/tests/core/canonical.test.ts b/packages/cli/tests/core/canonical.test.ts
new file mode 100644
index 0000000..fc3117f
--- /dev/null
+++ b/packages/cli/tests/core/canonical.test.ts
@@ -0,0 +1,93 @@
+import { describe, it, beforeEach, afterEach } from 'node:test';
+import assert from 'node:assert/strict';
+import { access, mkdtemp, mkdir, readFile, rm, writeFile } from 'node:fs/promises';
+import { join } from 'node:path';
+import { tmpdir } from 'node:os';
+import { buildCanonicalMarkdown, writeCanonical } from '../../src/core/canonical.js';
+import type { Rule } from '../../src/bridges/types.js';
+
+async function fileExists(path: string): Promise<boolean> {
+ try
{
+ await access(path);
+ return true;
+ } catch {
+ return false;
+ }
+}
+
+function makeRule(overrides: Partial<Rule> = {}): Rule {
+ return {
+ id: 'test-rule',
+ scope: 'conventions',
+ severity: 'error',
+ content: 'Always use named exports.',
+ enabled: true,
+ ...overrides,
+ };
+}
+
+describe('canonical writer', () => {
+ let tmpDir: string;
+
+ beforeEach(async () => {
+ tmpDir = await mkdtemp(join(tmpdir(), 'devw-canonical-'));
+ });
+
+ afterEach(async () => {
+ await rm(tmpDir, { recursive: true, force: true });
+ });
+
+ describe('buildCanonicalMarkdown', () => {
+ it('produces deterministic markdown without frontmatter', () => {
+ const rules = [
+ makeRule({ id: 'named-exports', content: 'Always use named exports.' }),
+ makeRule({ id: 'explicit-return', content: 'Declare return types.\nNever use implicit any.' }),
+ ];
+
+ const outputA = buildCanonicalMarkdown('conventions', rules);
+ const outputB = buildCanonicalMarkdown('conventions', rules);
+
+ assert.equal(outputA, outputB);
+ assert.ok(outputA.startsWith('# Conventions'));
+ assert.ok(outputA.includes('# Conventions'));
+ assert.ok(outputA.includes('- Always use named exports.'));
+ assert.ok(outputA.includes('- Declare return types.'));
+ assert.ok(outputA.includes(' Never use implicit any.'));
+ assert.ok(!outputA.includes(' \n'));
+ assert.ok(!outputA.startsWith('---'));
+ assert.ok(!outputA.includes('paths:'));
+ assert.ok(!outputA.includes('globs:'));
+ });
+ });
+
+ describe('writeCanonical', () => {
+ it('writes canonical files and removes stale dwf files only', async () => {
+ const canonicalDir = join(tmpDir, '.agents', 'rules', 'devw');
+ await mkdir(canonicalDir, { recursive: true });
+ await writeFile(join(canonicalDir, 'dwf-old-scope.md'), 'stale', 'utf-8');
+ await writeFile(join(canonicalDir, 'my-custom-notes.md'), 'keep me', 'utf-8');
+
+ const rules = [
+ makeRule({ id: 'named-exports', scope: 'conventions' }),
+ makeRule({ id: 'no-eval', scope: 'security', content: 'Never use eval().'
}), + makeRule({ id: 'info-rule', scope: 'architecture', severity: 'info', content: 'Informational only.' }), + ]; + + const written = await writeCanonical(tmpDir, rules); + + assert.deepEqual(written, [ + '.agents/rules/devw/dwf-conventions.md', + '.agents/rules/devw/dwf-security.md', + ]); + + assert.equal(await fileExists(join(canonicalDir, 'dwf-conventions.md')), true); + assert.equal(await fileExists(join(canonicalDir, 'dwf-security.md')), true); + assert.equal(await fileExists(join(canonicalDir, 'dwf-old-scope.md')), false); + assert.equal(await fileExists(join(canonicalDir, 'my-custom-notes.md')), true); + + const conventions = await readFile(join(canonicalDir, 'dwf-conventions.md'), 'utf-8'); + assert.ok(conventions.includes('# Conventions')); + assert.ok(!conventions.startsWith('---')); + }); + }); +}); diff --git a/packages/cli/tests/core/cleanup.test.ts b/packages/cli/tests/core/cleanup.test.ts new file mode 100644 index 0000000..e85a999 --- /dev/null +++ b/packages/cli/tests/core/cleanup.test.ts @@ -0,0 +1,299 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, mkdir, writeFile, readFile, readdir } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { + detectLegacyFiles, + migrateLegacyFiles, + removeLegacyMarkerBlock, +} from '../../src/core/cleanup.js'; +import type { LegacyFile } from '../../src/core/cleanup.js'; + +describe('detectLegacyFiles', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-cleanup-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('detects legacy .cursor/rules/devworkflows.mdc', async () => { + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'old cursor content'); + + const legacy = await 
detectLegacyFiles(tmpDir);
+
+ assert.equal(legacy.length, 1);
+ assert.equal(legacy[0]?.type, 'full-file');
+ assert.equal(legacy[0]?.bridgeId, 'cursor');
+ });
+
+ it('detects legacy .windsurf/rules/devworkflows.md', async () => {
+ await mkdir(join(tmpDir, '.windsurf', 'rules'), { recursive: true });
+ await writeFile(join(tmpDir, '.windsurf', 'rules', 'devworkflows.md'), 'old windsurf content');
+
+ const legacy = await detectLegacyFiles(tmpDir);
+
+ assert.equal(legacy.length, 1);
+ assert.equal(legacy[0]?.type, 'full-file');
+ assert.equal(legacy[0]?.bridgeId, 'windsurf');
+ });
+
+ it('detects CLAUDE.md with dev-workflows markers', async () => {
+ const claudeContent = [
+ '# My Notes',
+ '',
+ '<!-- BEGIN dev-workflows -->',
+ '# Project Rules',
+ '<!-- END dev-workflows -->',
+ '',
+ '# Other stuff',
+ ].join('\n');
+ await writeFile(join(tmpDir, 'CLAUDE.md'), claudeContent);
+
+ const legacy = await detectLegacyFiles(tmpDir);
+
+ assert.equal(legacy.length, 1);
+ assert.equal(legacy[0]?.type, 'marker');
+ assert.equal(legacy[0]?.bridgeId, 'claude');
+ });
+
+ it('does NOT detect CLAUDE.md without markers', async () => {
+ await writeFile(join(tmpDir, 'CLAUDE.md'), '# Just a normal CLAUDE.md');
+
+ const legacy = await detectLegacyFiles(tmpDir);
+
+ assert.equal(legacy.length, 0);
+ });
+
+ it('does NOT detect GEMINI.md as legacy', async () => {
+ const geminiContent = [
+ '<!-- BEGIN dev-workflows -->',
+ '# Rules',
+ '<!-- END dev-workflows -->',
+ ].join('\n');
+ await writeFile(join(tmpDir, 'GEMINI.md'), geminiContent);
+
+ const legacy = await detectLegacyFiles(tmpDir);
+
+ assert.equal(legacy.length, 0);
+ });
+
+ it('does NOT detect .github/copilot-instructions.md as legacy', async () => {
+ await mkdir(join(tmpDir, '.github'), { recursive: true });
+ const copilotContent = [
+ '<!-- BEGIN dev-workflows -->',
+ '# Rules',
+ '<!-- END dev-workflows -->',
+ ].join('\n');
+ await writeFile(join(tmpDir, '.github', 'copilot-instructions.md'), copilotContent);
+
+ const legacy = await detectLegacyFiles(tmpDir);
+
+ assert.equal(legacy.length, 0);
+ });
+
+ it('detects multiple legacy files at once', async () => {
+ await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true });
+ await mkdir(join(tmpDir, '.windsurf', 'rules'), { recursive: true });
+ await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'cursor');
+ await writeFile(join(tmpDir, '.windsurf', 'rules', 'devworkflows.md'), 'windsurf');
+ await writeFile(
+ join(tmpDir, 'CLAUDE.md'),
+ '<!-- BEGIN dev-workflows -->\n# Rules\n<!-- END dev-workflows -->',
+ );
+
+ const legacy = await detectLegacyFiles(tmpDir);
+
+ assert.equal(legacy.length, 3);
+ const bridgeIds = legacy.map((l) => l.bridgeId).sort();
+ assert.deepEqual(bridgeIds, ['claude', 'cursor', 'windsurf']);
+ });
+
+ it('returns empty array when no legacy files exist', async () => {
+ const legacy = await detectLegacyFiles(tmpDir);
+
+ assert.equal(legacy.length, 0);
+ });
+});
+
+describe('migrateLegacyFiles', () => {
+ let tmpDir: string;
+
+ beforeEach(async () => {
+ tmpDir = await mkdtemp(join(tmpdir(), 'devw-migrate-'));
+ });
+
+ afterEach(async () => {
+ await rm(tmpDir, { recursive: true, force: true });
+ });
+
+ it('deletes full-file legacy files', async () => {
+ await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true });
+ await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'old content');
+
+ const legacyFiles: LegacyFile[] = [{
+ path: join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'),
+ type: 'full-file',
+ bridgeId: 'cursor',
+ }];
+
+ const actions = await migrateLegacyFiles(tmpDir, legacyFiles);
+
+ assert.equal(actions.length, 1);
+ assert.ok(actions[0]?.includes('Removed legacy'));
+
+ const remaining = await readdir(join(tmpDir, '.cursor', 'rules'));
+ assert.equal(remaining.length, 0);
+ });
+
+ it('removes marker block from CLAUDE.md preserving manual content', async () => {
+ const claudeContent = [
+ '# My Custom Notes',
+ '',
+ '<!-- BEGIN dev-workflows -->',
+ '# Project Rules',
+ '',
+ '## Architecture',
+ '',
+ '- Use named exports.',
+ '<!-- END dev-workflows -->',
+ '',
+ '# Other important stuff',
+ ].join('\n');
+ await writeFile(join(tmpDir, 'CLAUDE.md'), claudeContent);
+
+
const legacyFiles: LegacyFile[] = [{ + path: join(tmpDir, 'CLAUDE.md'), + type: 'marker', + bridgeId: 'claude', + }]; + + const actions = await migrateLegacyFiles(tmpDir, legacyFiles); + + assert.equal(actions.length, 1); + assert.ok(actions[0]?.includes('Removed devw block')); + + const content = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); + assert.ok(content.includes('# My Custom Notes')); + assert.ok(content.includes('# Other important stuff')); + assert.ok(!content.includes('BEGIN dev-workflows')); + assert.ok(!content.includes('END dev-workflows')); + assert.ok(!content.includes('Use named exports')); + }); + + it('deletes CLAUDE.md if it becomes empty after marker removal', async () => { + const claudeContent = [ + '', + '# Project Rules', + '', + ].join('\n'); + await writeFile(join(tmpDir, 'CLAUDE.md'), claudeContent); + + const legacyFiles: LegacyFile[] = [{ + path: join(tmpDir, 'CLAUDE.md'), + type: 'marker', + bridgeId: 'claude', + }]; + + const actions = await migrateLegacyFiles(tmpDir, legacyFiles); + + assert.equal(actions.length, 1); + + const entries = await readdir(tmpDir); + assert.ok(!entries.includes('CLAUDE.md')); + }); + + it('is idempotent — skips files that do not exist', async () => { + const legacyFiles: LegacyFile[] = [{ + path: join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), + type: 'full-file', + bridgeId: 'cursor', + }]; + + const actions = await migrateLegacyFiles(tmpDir, legacyFiles); + + assert.equal(actions.length, 0); + }); + + it('handles mixed legacy file types', async () => { + await mkdir(join(tmpDir, '.cursor', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), 'old cursor'); + await writeFile( + join(tmpDir, 'CLAUDE.md'), + '# Notes\n\n\nRules\n\n\n# More', + ); + + const legacyFiles: LegacyFile[] = [ + { + path: join(tmpDir, '.cursor', 'rules', 'devworkflows.mdc'), + type: 'full-file', + bridgeId: 'cursor', + }, + { + path: join(tmpDir, 'CLAUDE.md'), + type: 
'marker', + bridgeId: 'claude', + }, + ]; + + const actions = await migrateLegacyFiles(tmpDir, legacyFiles); + + assert.equal(actions.length, 2); + }); +}); + +describe('removeLegacyMarkerBlock', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-marker-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('removes marker block and preserves surrounding content', async () => { + const filePath = join(tmpDir, 'test.md'); + await writeFile(filePath, '# Before\n\n\nRules\n\n\n# After'); + + const result = await removeLegacyMarkerBlock(filePath); + + assert.equal(result, true); + const content = await readFile(filePath, 'utf-8'); + assert.ok(content.includes('# Before')); + assert.ok(content.includes('# After')); + assert.ok(!content.includes('BEGIN dev-workflows')); + }); + + it('returns false if file does not exist', async () => { + const result = await removeLegacyMarkerBlock(join(tmpDir, 'nonexistent.md')); + + assert.equal(result, false); + }); + + it('returns false if file has no markers', async () => { + const filePath = join(tmpDir, 'test.md'); + await writeFile(filePath, '# Just normal content'); + + const result = await removeLegacyMarkerBlock(filePath); + + assert.equal(result, false); + }); + + it('deletes file if it becomes empty after removal', async () => { + const filePath = join(tmpDir, 'test.md'); + await writeFile(filePath, '\nRules\n'); + + const result = await removeLegacyMarkerBlock(filePath); + + assert.equal(result, true); + const entries = await readdir(tmpDir); + assert.ok(!entries.includes('test.md')); + }); +}); diff --git a/packages/cli/tests/core/merge.test.ts b/packages/cli/tests/core/merge.test.ts new file mode 100644 index 0000000..c30095a --- /dev/null +++ b/packages/cli/tests/core/merge.test.ts @@ -0,0 +1,55 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import type { Rule } from 
'../../src/bridges/types.js'; +import { mergeRules } from '../../src/core/merge.js'; + +function makeRule(id: string, content: string): Rule { + return { + id, + scope: 'conventions', + severity: 'error', + content, + enabled: true, + }; +} + +describe('mergeRules', () => { + it('includes global and project rules when there are no conflicts', () => { + const globalRules = [ + makeRule('g1', 'global one'), + makeRule('g2', 'global two'), + ]; + const projectRules = [ + makeRule('p1', 'project one'), + makeRule('p2', 'project two'), + ]; + + const merged = mergeRules(globalRules, projectRules); + + assert.deepEqual(merged.map((rule) => rule.id), ['g1', 'g2', 'p1', 'p2']); + }); + + it('prefers project rules when IDs conflict', () => { + const globalRules = [ + makeRule('strict-types', 'global version'), + makeRule('g2', 'global two'), + ]; + const projectRules = [ + makeRule('strict-types', 'project version'), + makeRule('p2', 'project two'), + ]; + + const merged = mergeRules(globalRules, projectRules); + + assert.equal(merged.length, 3); + const strictTypes = merged.find((rule) => rule.id === 'strict-types'); + assert.ok(strictTypes); + assert.equal(strictTypes.content, 'project version'); + }); + + it('handles empty arrays', () => { + assert.deepEqual(mergeRules([], []), []); + assert.deepEqual(mergeRules([makeRule('g1', 'global')], []).map((rule) => rule.id), ['g1']); + assert.deepEqual(mergeRules([], [makeRule('p1', 'project')]).map((rule) => rule.id), ['p1']); + }); +}); diff --git a/packages/cli/tests/core/parser.test.ts b/packages/cli/tests/core/parser.test.ts new file mode 100644 index 0000000..f92d930 --- /dev/null +++ b/packages/cli/tests/core/parser.test.ts @@ -0,0 +1,307 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, mkdir, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { readConfig, readRules } from 
'../../src/core/parser.js'; + +const BASE_CONFIG = `version: "0.1" +project: + name: test-project +tools: + - claude +mode: copy +blocks: []`; + +const CONFIG_V02 = `version: "0.2" +project: + name: test-project +tools: + - claude +mode: copy +blocks: [] +global: false`; + +async function createProject(dir: string, configYaml: string): Promise { + await mkdir(join(dir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(dir, '.dwf', 'config.yml'), configYaml, 'utf-8'); +} + +describe('readRules scope metadata parsing', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-parser-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('parses YAML with top-level metadata fields', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +globs: + - "**/*.ts" + - "**/*.tsx" +paths: + - "src/" +trigger: always +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + assert.ok(rules[0]?.metadata); + assert.deepEqual(rules[0]?.metadata?.globs, ['**/*.ts', '**/*.tsx']); + assert.deepEqual(rules[0]?.metadata?.paths, ['src/']); + assert.equal(rules[0]?.metadata?.trigger, 'always'); + }); + + it('parses YAML with nested metadata block', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +metadata: + globs: + - "**/*.ts" + trigger: glob +rules: + - id: named-exports + severity: error + content: Always use named exports. 
+`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + assert.ok(rules[0]?.metadata); + assert.deepEqual(rules[0]?.metadata?.globs, ['**/*.ts']); + assert.equal(rules[0]?.metadata?.trigger, 'glob'); + }); + + it('parses YAML without metadata block (backward compat)', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + assert.equal(rules[0]?.metadata, undefined); + }); + + it('rejects invalid trigger value with warning', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +trigger: invalid +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + // Metadata should be undefined because validation failed + assert.equal(rules[0]?.metadata, undefined); + }); + + it('rejects non-array globs with warning', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +globs: "**/*.ts" +rules: + - id: named-exports + severity: error + content: Always use named exports. 
+`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + // Metadata should be undefined because validation failed + assert.equal(rules[0]?.metadata, undefined); + }); + + it('rejects non-array paths with warning', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +paths: "src/" +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + assert.equal(rules[0]?.metadata, undefined); + }); + + it('attaches same metadata to all rules in the scope', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +globs: + - "**/*.ts" +trigger: always +rules: + - id: named-exports + severity: error + content: Always use named exports. + - id: no-barrel + severity: warning + content: Avoid barrel files. +`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 2); + assert.deepEqual(rules[0]?.metadata?.globs, ['**/*.ts']); + assert.equal(rules[0]?.metadata?.trigger, 'always'); + assert.deepEqual(rules[1]?.metadata?.globs, ['**/*.ts']); + assert.equal(rules[1]?.metadata?.trigger, 'always'); + }); + + it('top-level fields take precedence over nested metadata', async () => { + await createProject(tmpDir, BASE_CONFIG); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +metadata: + trigger: glob +trigger: always +rules: + - id: named-exports + severity: error + content: Always use named exports. 
+`, + 'utf-8', + ); + + const rules = await readRules(tmpDir); + + assert.equal(rules.length, 1); + assert.equal(rules[0]?.metadata?.trigger, 'always'); + }); +}); + +describe('readConfig version handling', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-config-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('reads v0.1 config with global defaulting to true', async () => { + await createProject(tmpDir, BASE_CONFIG); + + const config = await readConfig(tmpDir); + + assert.equal(config.version, '0.1'); + assert.equal(config.global, true); + }); + + it('reads v0.2 config with explicit global: false', async () => { + await createProject(tmpDir, CONFIG_V02); + + const config = await readConfig(tmpDir); + + assert.equal(config.version, '0.2'); + assert.equal(config.global, false); + }); + + it('reads v0.2 config with global: true', async () => { + const configWithGlobalTrue = `version: "0.2" +project: + name: test-project +tools: + - claude +mode: copy +blocks: [] +global: true`; + + await createProject(tmpDir, configWithGlobalTrue); + + const config = await readConfig(tmpDir); + + assert.equal(config.version, '0.2'); + assert.equal(config.global, true); + }); + + it('rejects unsupported config version', async () => { + const badConfig = `version: "99.0" +project: + name: test-project +tools: + - claude +mode: copy +blocks: []`; + + await createProject(tmpDir, badConfig); + + await assert.rejects( + () => readConfig(tmpDir), + (err: Error) => { + assert.ok(err.message.includes('unsupported version')); + assert.ok(err.message.includes('99.0')); + return true; + }, + ); + }); + + it('defaults to v0.1 when version is missing', async () => { + const noVersion = `project: + name: test-project +tools: + - claude +mode: copy +blocks: []`; + + await createProject(tmpDir, noVersion); + + const config = await readConfig(tmpDir); + assert.equal(config.version, '0.1'); 
+ }); +}); diff --git a/packages/cli/tests/core/schema.test.ts b/packages/cli/tests/core/schema.test.ts index d100a2f..a16082e 100644 --- a/packages/cli/tests/core/schema.test.ts +++ b/packages/cli/tests/core/schema.test.ts @@ -1,6 +1,15 @@ import { describe, it } from 'node:test'; import assert from 'node:assert/strict'; -import { isValidScope, isBuiltinScope, BUILTIN_SCOPES, VALID_TOOL_IDS } from '../../src/core/schema.js'; +import { + isValidScope, + isBuiltinScope, + isValidTrigger, + BUILTIN_SCOPES, + VALID_TOOL_IDS, + VALID_TRIGGERS, + VALID_CONFIG_VERSIONS, + validateScopeMetadata, +} from '../../src/core/schema.js'; describe('isValidScope', () => { it('accepts built-in scopes', () => { @@ -81,3 +90,104 @@ describe('VALID_TOOL_IDS', () => { assert.deepEqual([...VALID_TOOL_IDS], ['claude', 'cursor', 'gemini', 'windsurf', 'copilot']); }); }); + +describe('VALID_CONFIG_VERSIONS', () => { + it('contains 0.1 and 0.2', () => { + assert.deepEqual([...VALID_CONFIG_VERSIONS], ['0.1', '0.2']); + }); +}); + +describe('VALID_TRIGGERS', () => { + it('contains always, glob, and manual', () => { + assert.deepEqual([...VALID_TRIGGERS], ['always', 'glob', 'manual']); + }); +}); + +describe('isValidTrigger', () => { + it('returns true for valid triggers', () => { + assert.equal(isValidTrigger('always'), true); + assert.equal(isValidTrigger('glob'), true); + assert.equal(isValidTrigger('manual'), true); + }); + + it('returns false for invalid triggers', () => { + assert.equal(isValidTrigger('invalid'), false); + assert.equal(isValidTrigger('auto'), false); + assert.equal(isValidTrigger(''), false); + }); +}); + +describe('validateScopeMetadata', () => { + it('returns valid metadata with globs array', () => { + const result = validateScopeMetadata({ globs: ['**/*.ts', '**/*.tsx'] }); + assert.equal(result.errors.length, 0); + assert.ok(result.metadata); + assert.deepEqual(result.metadata?.globs, ['**/*.ts', '**/*.tsx']); + }); + + it('returns valid metadata with paths array', 
() => { + const result = validateScopeMetadata({ paths: ['src/', 'lib/'] }); + assert.equal(result.errors.length, 0); + assert.ok(result.metadata); + assert.deepEqual(result.metadata?.paths, ['src/', 'lib/']); + }); + + it('returns valid metadata with trigger value', () => { + const result = validateScopeMetadata({ trigger: 'always' }); + assert.equal(result.errors.length, 0); + assert.ok(result.metadata); + assert.equal(result.metadata?.trigger, 'always'); + }); + + it('returns undefined metadata for empty input', () => { + const result = validateScopeMetadata({}); + assert.equal(result.errors.length, 0); + assert.equal(result.metadata, undefined); + }); + + it('rejects non-array globs', () => { + const result = validateScopeMetadata({ globs: '**/*.ts' }); + assert.ok(result.errors.length > 0); + assert.ok(result.errors[0]?.field === 'globs'); + }); + + it('rejects non-string items in globs array', () => { + const result = validateScopeMetadata({ globs: [123, true] }); + assert.ok(result.errors.length > 0); + assert.ok(result.errors[0]?.field === 'globs'); + }); + + it('rejects non-array paths', () => { + const result = validateScopeMetadata({ paths: 'src/' }); + assert.ok(result.errors.length > 0); + assert.ok(result.errors[0]?.field === 'paths'); + }); + + it('rejects invalid trigger value', () => { + const result = validateScopeMetadata({ trigger: 'invalid' }); + assert.ok(result.errors.length > 0); + assert.ok(result.errors[0]?.field === 'trigger'); + assert.ok(result.errors[0]?.message.includes('invalid')); + }); + + it('validates all fields together', () => { + const result = validateScopeMetadata({ + globs: ['**/*.ts'], + paths: ['src/'], + trigger: 'glob', + }); + assert.equal(result.errors.length, 0); + assert.ok(result.metadata); + assert.deepEqual(result.metadata?.globs, ['**/*.ts']); + assert.deepEqual(result.metadata?.paths, ['src/']); + assert.equal(result.metadata?.trigger, 'glob'); + }); + + it('reports multiple errors at once', () => { + const 
result = validateScopeMetadata({ + globs: 'not-an-array', + trigger: 'invalid', + }); + assert.equal(result.errors.length, 2); + }); +}); diff --git a/packages/cli/tests/core/scope-filename.test.ts b/packages/cli/tests/core/scope-filename.test.ts new file mode 100644 index 0000000..e60a1aa --- /dev/null +++ b/packages/cli/tests/core/scope-filename.test.ts @@ -0,0 +1,149 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, writeFile, readdir } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { scopeToFilename, cleanStaleFiles } from '../../src/core/scope-filename.js'; + +describe('scopeToFilename', () => { + it('converts built-in scope "conventions"', () => { + const result = scopeToFilename('conventions', 'dwf-', '.md'); + assert.equal(result, 'dwf-conventions.md'); + }); + + it('converts built-in scope "security"', () => { + const result = scopeToFilename('security', 'dwf-', '.md'); + assert.equal(result, 'dwf-security.md'); + }); + + it('converts built-in scope "testing"', () => { + const result = scopeToFilename('testing', 'dwf-', '.mdc'); + assert.equal(result, 'dwf-testing.mdc'); + }); + + it('converts built-in scope "architecture"', () => { + const result = scopeToFilename('architecture', 'dwf-', '.md'); + assert.equal(result, 'dwf-architecture.md'); + }); + + it('converts built-in scope "workflow"', () => { + const result = scopeToFilename('workflow', 'dwf-', '.md'); + assert.equal(result, 'dwf-workflow.md'); + }); + + it('replaces single colon in custom scope', () => { + const result = scopeToFilename('team:payments', 'dwf-', '.md'); + assert.equal(result, 'dwf-team-payments.md'); + }); + + it('replaces multiple colons in custom scope', () => { + const result = scopeToFilename('team:payments:billing', 'dwf-', '.md'); + assert.equal(result, 'dwf-team-payments-billing.md'); + }); + + it('works with .mdc extension for cursor', 
() => { + const result = scopeToFilename('team:payments', 'dwf-', '.mdc'); + assert.equal(result, 'dwf-team-payments.mdc'); + }); + + it('works with .instructions.md extension', () => { + const result = scopeToFilename('security', 'dwf-', '.instructions.md'); + assert.equal(result, 'dwf-security.instructions.md'); + }); + + it('handles scope with no colon', () => { + const result = scopeToFilename('conventions', 'dwf-', '.md'); + assert.equal(result, 'dwf-conventions.md'); + }); +}); + +describe('cleanStaleFiles', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-stale-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('removes orphaned files with matching prefix and extension', async () => { + await writeFile(join(tmpDir, 'dwf-conventions.md'), 'content'); + await writeFile(join(tmpDir, 'dwf-security.md'), 'content'); + await writeFile(join(tmpDir, 'dwf-testing.md'), 'content'); + + const currentFiles = new Set(['dwf-conventions.md', 'dwf-security.md']); + const deleted = await cleanStaleFiles(tmpDir, 'dwf-', '.md', currentFiles); + + assert.deepEqual(deleted, ['dwf-testing.md']); + + const remaining = await readdir(tmpDir); + assert.ok(remaining.includes('dwf-conventions.md')); + assert.ok(remaining.includes('dwf-security.md')); + assert.ok(!remaining.includes('dwf-testing.md')); + }); + + it('preserves current files', async () => { + await writeFile(join(tmpDir, 'dwf-conventions.md'), 'content'); + await writeFile(join(tmpDir, 'dwf-security.md'), 'content'); + + const currentFiles = new Set(['dwf-conventions.md', 'dwf-security.md']); + const deleted = await cleanStaleFiles(tmpDir, 'dwf-', '.md', currentFiles); + + assert.deepEqual(deleted, []); + + const remaining = await readdir(tmpDir); + assert.equal(remaining.length, 2); + }); + + it('ignores non-matching files (no prefix match)', async () => { + await writeFile(join(tmpDir, 'dwf-conventions.md'), 
'content'); + await writeFile(join(tmpDir, 'my-custom-rule.md'), 'user content'); + await writeFile(join(tmpDir, 'README.md'), 'readme'); + + const currentFiles = new Set(['dwf-conventions.md']); + const deleted = await cleanStaleFiles(tmpDir, 'dwf-', '.md', currentFiles); + + assert.deepEqual(deleted, []); + + const remaining = await readdir(tmpDir); + assert.ok(remaining.includes('my-custom-rule.md')); + assert.ok(remaining.includes('README.md')); + assert.ok(remaining.includes('dwf-conventions.md')); + }); + + it('ignores files with wrong extension', async () => { + await writeFile(join(tmpDir, 'dwf-conventions.md'), 'content'); + await writeFile(join(tmpDir, 'dwf-conventions.mdc'), 'cursor content'); + + const currentFiles = new Set(['dwf-conventions.md']); + const deleted = await cleanStaleFiles(tmpDir, 'dwf-', '.md', currentFiles); + + assert.deepEqual(deleted, []); + + const remaining = await readdir(tmpDir); + assert.ok(remaining.includes('dwf-conventions.mdc')); + }); + + it('returns empty array for non-existent directory', async () => { + const nonExistent = join(tmpDir, 'does-not-exist'); + const deleted = await cleanStaleFiles(nonExistent, 'dwf-', '.md', new Set()); + + assert.deepEqual(deleted, []); + }); + + it('removes all stale files when currentFiles is empty', async () => { + await writeFile(join(tmpDir, 'dwf-conventions.md'), 'content'); + await writeFile(join(tmpDir, 'dwf-security.md'), 'content'); + + const deleted = await cleanStaleFiles(tmpDir, 'dwf-', '.md', new Set()); + + assert.equal(deleted.length, 2); + assert.ok(deleted.includes('dwf-conventions.md')); + assert.ok(deleted.includes('dwf-security.md')); + + const remaining = await readdir(tmpDir); + assert.equal(remaining.length, 0); + }); +}); diff --git a/packages/cli/tests/e2e/cli.test.ts b/packages/cli/tests/e2e/cli.test.ts index 155796e..c3ca300 100644 --- a/packages/cli/tests/e2e/cli.test.ts +++ b/packages/cli/tests/e2e/cli.test.ts @@ -78,7 +78,7 @@ describe('devw CLI e2e', () => { 
assert.ok(result.stderr.includes('already exists')); }); - it('compile generates CLAUDE.md with markers when rules exist', async () => { + it('compile generates CLAUDE.md when rules exist', async () => { await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); // Write a manual rule so compile has something to output @@ -99,13 +99,13 @@ rules: assert.equal(result.exitCode, 0); assert.ok(result.stdout.includes('Compiled')); - const claudeMd = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); - assert.ok(claudeMd.includes('')); - assert.ok(claudeMd.includes('')); + const claudeMd = await readFile(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), 'utf-8'); + assert.ok(claudeMd.includes('# Conventions')); + assert.ok(claudeMd.includes('Always test your code.')); }); - it('compile preserves user content outside markers', async () => { - await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); + it('compile with copilot preserves user content outside markers', async () => { + await run(['init', '--tools', 'copilot', '--mode', 'copy', '-y'], tmpDir); const rulesPath = join(tmpDir, '.dwf', 'rules', 'conventions.yml'); await writeFile( @@ -121,14 +121,14 @@ rules: await run(['compile'], tmpDir); - const claudeMd = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); - const withUserContent = `# My Custom Rules\n\nDo not touch this.\n\n${claudeMd}\n# Footer\n\nAlso keep this.\n`; - await writeFile(join(tmpDir, 'CLAUDE.md'), withUserContent, 'utf-8'); + const copilotMd = await readFile(join(tmpDir, '.github', 'copilot-instructions.md'), 'utf-8'); + const withUserContent = `# My Custom Rules\n\nDo not touch this.\n\n${copilotMd}\n# Footer\n\nAlso keep this.\n`; + await writeFile(join(tmpDir, '.github', 'copilot-instructions.md'), withUserContent, 'utf-8'); const result = await run(['compile'], tmpDir); assert.equal(result.exitCode, 0); - const updated = await readFile(join(tmpDir, 'CLAUDE.md'), 'utf-8'); + const updated = await 
readFile(join(tmpDir, '.github', 'copilot-instructions.md'), 'utf-8'); assert.ok(updated.includes('# My Custom Rules')); assert.ok(updated.includes('Do not touch this.')); assert.ok(updated.includes('# Footer')); @@ -175,6 +175,7 @@ rules: it('doctor passes on valid project', async () => { await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); + await run(['compile'], tmpDir); const result = await run(['doctor'], tmpDir); assert.equal(result.exitCode, 0); @@ -224,13 +225,13 @@ rules: assert.ok(result.stderr.includes('Invalid rule path')); }); - it('remove without pulled rules shows warning', async () => { + it('remove without args in non-TTY shows usage error', async () => { await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); - // Non-TTY, no args, no pulled → should warn + // Non-TTY, no args → should error with usage hint const result = await run(['remove'], tmpDir); - assert.equal(result.exitCode, 0); - assert.ok(result.stdout.includes('Nothing installed to remove')); + assert.equal(result.exitCode, 1); + assert.ok(result.stderr.includes('No rule specified')); }); it('remove with old block format exits with error', async () => { @@ -256,4 +257,83 @@ rules: assert.equal(result.exitCode, 1); assert.ok(result.stderr.includes('not installed')); }); + + it('compile generates multi-file output for directory bridges', async () => { + await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); + + // Write two scope files + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + await writeFile( + join(tmpDir, '.dwf', 'rules', 'security.yml'), + `scope: security +rules: + - id: no-eval + severity: error + content: Never use eval. 
+`, + 'utf-8', + ); + + const result = await run(['compile'], tmpDir); + + assert.equal(result.exitCode, 0); + + // Both scope files should be generated + const convFile = await readFile(join(tmpDir, '.claude', 'rules', 'dwf-conventions.md'), 'utf-8'); + assert.ok(convFile.includes('named exports')); + + const secFile = await readFile(join(tmpDir, '.claude', 'rules', 'dwf-security.md'), 'utf-8'); + assert.ok(secFile.includes('eval')); + }); + + it('compile --dry-run lists files without writing', async () => { + await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); + + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: named-exports + severity: error + content: Always use named exports. +`, + 'utf-8', + ); + + const result = await run(['compile', '--dry-run'], tmpDir); + + assert.equal(result.exitCode, 0); + assert.ok(result.stdout.includes('Dry run')); + assert.ok(result.stdout.includes('.claude/rules/dwf-conventions.md')); + }); + + it('explain shows multi-file output paths for directory bridges', async () => { + await run(['init', '--tools', 'claude', '--mode', 'copy', '-y'], tmpDir); + + await writeFile( + join(tmpDir, '.dwf', 'rules', 'conventions.yml'), + `scope: conventions +rules: + - id: named-exports + severity: error + content: Always use named exports. 
+`, + 'utf-8', + ); + + const result = await run(['explain'], tmpDir); + + assert.equal(result.exitCode, 0); + assert.ok(result.stdout.includes('multi-file')); + assert.ok(result.stdout.includes('.claude/rules/dwf-')); + }); }); diff --git a/packages/cli/tests/ui/output.test.ts b/packages/cli/tests/ui/output.test.ts index 6b88b9c..470c1b6 100644 --- a/packages/cli/tests/ui/output.test.ts +++ b/packages/cli/tests/ui/output.test.ts @@ -19,11 +19,11 @@ interface RunResult { exitCode: number; } -async function run(args: string[], cwd: string): Promise { +async function run(args: string[], cwd: string, extraEnv?: Record): Promise { try { const { stdout, stderr } = await execFile(NODE, [DEVW, ...args], { cwd, - env: { ...process.env, NO_COLOR: '1', FORCE_COLOR: '0' }, + env: { ...process.env, NO_COLOR: '1', FORCE_COLOR: '0', ...extraEnv }, }); return { stdout, stderr, exitCode: 0 }; } catch (err: unknown) { @@ -92,6 +92,8 @@ describe('output format: compile', () => { assert.ok(result.stdout.includes('\u2192'), 'should have arrow'); assert.ok(result.stdout.includes('file'), 'should mention files'); assert.ok(/\(\d+ms\)/.test(result.stdout), 'should have timing'); + assert.ok(result.stdout.includes('bridge'), 'should include summary table headers'); + assert.ok(result.stdout.includes('generated'), 'should include generated column'); }); it('shows file list with bullet prefix', async () => { @@ -102,7 +104,27 @@ describe('output format: compile', () => { const result = await run(['compile'], tmpDir); assert.ok(result.stdout.includes('\u203A'), 'should have bullet prefix'); - assert.ok(result.stdout.includes('CLAUDE.md'), 'should list CLAUDE.md'); + assert.ok(result.stdout.includes('.claude/rules/dwf-conventions.md'), 'should list .claude/rules/dwf-conventions.md'); + }); +}); + +describe('output format: help', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'devw-output-')); + }); + + afterEach(async () => { + await 
rm(tmpDir, { recursive: true, force: true }); + }); + + it('does not print the banner in non-TTY output', async () => { + const result = await run(['--help'], tmpDir); + + assert.equal(result.exitCode, 0); + assert.ok(result.stdout.includes('Usage: devw'), 'should print standard help text'); + assert.equal(result.stdout.includes('____ __ __'), false); }); }); @@ -137,6 +159,9 @@ describe('output format: doctor', () => { await writeFile(join(tmpDir, '.dwf', 'config.yml'), CONFIG_TEMPLATE(['claude'])); await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), RULES_CONVENTIONS); + // Compile first so canonical checks pass; symlink check should still be skipped in copy mode + await run(['compile'], tmpDir); + const result = await run(['doctor'], tmpDir); assert.equal(result.exitCode, 0); @@ -191,7 +216,19 @@ describe('output format: list tools', () => { assert.equal(result.exitCode, 0); assert.ok(result.stdout.includes('\u203A'), 'should have bullet prefix'); assert.ok(result.stdout.includes('\u2192'), 'should have arrow'); - assert.ok(result.stdout.includes('CLAUDE.md'), 'should show output path'); + assert.ok(result.stdout.includes('.claude/rules/dwf-'), 'should show output directory pattern'); + assert.ok(result.stdout.includes('(0 files)'), 'should show per-tool multi-file count'); + }); + + it('uses singular grammar for one active scope file', async () => { + await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), CONFIG_TEMPLATE(['claude'])); + await writeFile(join(tmpDir, '.dwf', 'rules', 'conventions.yml'), RULES_CONVENTIONS); + + const result = await run(['list', 'tools'], tmpDir); + + assert.equal(result.exitCode, 0); + assert.ok(result.stdout.includes('(1 file)')); }); }); @@ -233,13 +270,13 @@ describe('output format: explain', () => { it('shows new mode labels', async () => { await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); - await writeFile(join(tmpDir, '.dwf', 
'config.yml'), CONFIG_TEMPLATE(['claude', 'cursor'])); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), CONFIG_TEMPLATE(['copilot', 'cursor'])); await writeFile(join(tmpDir, '.dwf', 'rules', 'architecture.yml'), RULES_WITH_MIX); const result = await run(['explain'], tmpDir); - assert.ok(result.stdout.includes('markers (BEGIN/END)'), 'should show markers mode for claude'); - assert.ok(result.stdout.includes('full file'), 'should show full file mode for cursor'); + assert.ok(result.stdout.includes('markers (BEGIN/END)'), 'should show markers mode for copilot'); + assert.ok(result.stdout.includes('multi-file (one per scope)'), 'should show multi-file mode for cursor'); }); }); @@ -269,4 +306,14 @@ describe('output format: error messages', () => { assert.equal(result.exitCode, 1); assert.ok(result.stderr.includes('\u2717'), 'should have error icon'); }); + + it('remove without args in non-interactive mode shows usage hint', async () => { + await mkdir(join(tmpDir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(tmpDir, '.dwf', 'config.yml'), CONFIG_TEMPLATE(['claude'])); + + const result = await run(['remove'], tmpDir, { CI: 'true' }); + + assert.equal(result.exitCode, 1); + assert.ok(result.stderr.includes('Usage: devw remove /')); + }); }); diff --git a/packages/cli/tests/utils/banner.test.ts b/packages/cli/tests/utils/banner.test.ts new file mode 100644 index 0000000..a251068 --- /dev/null +++ b/packages/cli/tests/utils/banner.test.ts @@ -0,0 +1,48 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { renderBanner } from '../../src/utils/banner.js'; + +describe('renderBanner', () => { + let originalStdoutIsTTY: boolean | undefined; + + beforeEach(() => { + originalStdoutIsTTY = process.stdout.isTTY; + }); + + afterEach(() => { + Object.defineProperty(process.stdout, 'isTTY', { + value: originalStdoutIsTTY, + writable: true, + configurable: true, + }); + }); + + it('returns empty 
string when stdout is not a TTY', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + + assert.equal(renderBanner(), ''); + }); + + it('returns deterministic ANSI banner for TTY output', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + + const expected = [ + '\u001b[38;5;45m██████╗ ███████╗██╗ ██╗██╗ ██╗\u001b[0m', + '\u001b[38;5;76m██╔══██╗██╔════╝██║ ██║██║ ██║\u001b[0m', + '\u001b[38;5;107m██║ ██║█████╗ ██║ ██║██║ █╗ ██║\u001b[0m', + '\u001b[38;5;139m██║ ██║██╔══╝ ╚██╗ ██╔╝██║███╗██║\u001b[0m', + '\u001b[38;5;170m██████╔╝███████╗ ╚████╔╝ ╚███╔███╔╝\u001b[0m', + '\u001b[38;5;201m╚═════╝ ╚══════╝ ╚═══╝ ╚══╝╚══╝\u001b[0m', + ].join('\n'); + + assert.equal(renderBanner(), expected); + }); +}); diff --git a/packages/cli/tests/utils/cache.edge.test.ts b/packages/cli/tests/utils/cache.edge.test.ts index 34b83ad..6e00e88 100644 --- a/packages/cli/tests/utils/cache.edge.test.ts +++ b/packages/cli/tests/utils/cache.edge.test.ts @@ -3,7 +3,7 @@ import assert from 'node:assert/strict'; import { mkdtemp, rm, mkdir, writeFile, readFile } from 'node:fs/promises'; import { join } from 'node:path'; import { tmpdir } from 'node:os'; -import { get, set, getFromDisk } from '../../src/utils/cache.js'; +import { get, set, getFromDisk, fetchWithETag } from '../../src/utils/cache.js'; describe('cache edge cases', () => { let tempDir: string; @@ -38,7 +38,7 @@ describe('cache edge cases', () => { }, }; await writeFile( - join(tempDir, '.dwf', '.cache', 'registry.json'), + join(tempDir, '.dwf', '.cache', 'registry-store.json'), JSON.stringify(store), 'utf-8', ); @@ -51,7 +51,7 @@ describe('cache edge cases', () => { describe('malformed store', () => { it('readStore returns {} when cache file is a JSON array', async () => { await writeFile( - join(tempDir, '.dwf', '.cache', 'registry.json'), + join(tempDir, '.dwf', '.cache', 
'registry-store.json'), JSON.stringify([1, 2, 3]), 'utf-8', ); @@ -82,7 +82,7 @@ describe('cache edge cases', () => { const after = Date.now(); const raw = await readFile( - join(tempDir, '.dwf', '.cache', 'registry.json'), + join(tempDir, '.dwf', '.cache', 'registry-store.json'), 'utf-8', ); const store = JSON.parse(raw) as Record; @@ -99,4 +99,79 @@ describe('cache edge cases', () => { assert.equal(result, null); }); }); + + describe('fetchWithETag', () => { + const originalFetch = globalThis.fetch; + + afterEach(() => { + globalThis.fetch = originalFetch; + }); + + it('writes cache and etag on 200 response', async () => { + globalThis.fetch = async () => + new Response(JSON.stringify({ version: 1 }), { + status: 200, + headers: { + etag: 'W/"abc123"', + 'content-type': 'application/json', + }, + }); + + const cacheDir = join(tempDir, '.dwf', '.cache'); + const result = await fetchWithETag<{ version: number }>('https://example.com/registry.json', cacheDir, 'registry'); + + assert.equal(result.fromCache, false); + assert.deepEqual(result.data, { version: 1 }); + + const dataRaw = await readFile(join(cacheDir, 'registry.json'), 'utf-8'); + const etagRaw = await readFile(join(cacheDir, 'registry.etag'), 'utf-8'); + assert.deepEqual(JSON.parse(dataRaw), { version: 1 }); + assert.equal(etagRaw.trim(), 'W/"abc123"'); + }); + + it('uses local cache when server returns 304', async () => { + const cacheDir = join(tempDir, '.dwf', '.cache'); + await writeFile(join(cacheDir, 'registry.json'), JSON.stringify({ cached: true }), 'utf-8'); + await writeFile(join(cacheDir, 'registry.etag'), 'W/"etag-a"\n', 'utf-8'); + + globalThis.fetch = async (_url, init) => { + const headers = (init?.headers ?? 
{}) as Record; + assert.equal(headers['If-None-Match'], 'W/"etag-a"'); + return new Response(null, { status: 304 }); + }; + + const result = await fetchWithETag<{ cached: boolean }>('https://example.com/registry.json', cacheDir, 'registry'); + assert.equal(result.fromCache, true); + assert.deepEqual(result.data, { cached: true }); + }); + + it('falls back to cache when network fails', async () => { + const cacheDir = join(tempDir, '.dwf', '.cache'); + await writeFile(join(cacheDir, 'registry.json'), JSON.stringify({ offline: true }), 'utf-8'); + + globalThis.fetch = async () => { + throw new Error('network down'); + }; + + const result = await fetchWithETag<{ offline: boolean }>('https://example.com/registry.json', cacheDir, 'registry'); + assert.equal(result.fromCache, true); + assert.deepEqual(result.data, { offline: true }); + }); + + it('throws clear error when no cache and request fails', async () => { + const cacheDir = join(tempDir, '.dwf', '.cache'); + + globalThis.fetch = async () => { + throw new Error('network down'); + }; + + await assert.rejects( + () => fetchWithETag('https://example.com/registry.json', cacheDir, 'registry'), + (error: Error) => { + assert.match(error.message, /Unable to fetch registry/); + return true; + }, + ); + }); + }); }); diff --git a/packages/cli/tests/utils/cache.test.ts b/packages/cli/tests/utils/cache.test.ts index d326927..716eff3 100644 --- a/packages/cli/tests/utils/cache.test.ts +++ b/packages/cli/tests/utils/cache.test.ts @@ -39,7 +39,7 @@ describe('cache', () => { it('handles corrupted cache file gracefully', async () => { const { writeFile } = await import('node:fs/promises'); - await writeFile(join(tempDir, '.dwf', '.cache', 'registry.json'), 'not json!!!', 'utf-8'); + await writeFile(join(tempDir, '.dwf', '.cache', 'registry-store.json'), 'not json!!!', 'utf-8'); const result = await getFromDisk(tempDir, 'key'); assert.equal(result, null); }); diff --git a/packages/cli/tests/utils/legacy-imports.test.ts 
b/packages/cli/tests/utils/legacy-imports.test.ts new file mode 100644 index 0000000..6b175f2 --- /dev/null +++ b/packages/cli/tests/utils/legacy-imports.test.ts @@ -0,0 +1,45 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { readdir, readFile } from 'node:fs/promises'; +import { join } from 'node:path'; + +const SRC_ROOT = join(process.cwd(), 'src'); + +async function collectTypeScriptFiles(root: string): Promise { + const entries = await readdir(root, { withFileTypes: true }); + const files: string[] = []; + + for (const entry of entries) { + const fullPath = join(root, entry.name); + if (entry.isDirectory()) { + const nested = await collectTypeScriptFiles(fullPath); + files.push(...nested); + continue; + } + + if (entry.isFile() && fullPath.endsWith('.ts')) { + files.push(fullPath); + } + } + + return files; +} + +describe('legacy imports are fully removed from src', () => { + it('does not use chalk or @inquirer/prompts', async () => { + const tsFiles = await collectTypeScriptFiles(SRC_ROOT); + const offenders: string[] = []; + + for (const filePath of tsFiles) { + const content = await readFile(filePath, 'utf-8'); + if (content.includes("'chalk'") || content.includes('"chalk"')) { + offenders.push(`${filePath}: chalk`); + } + if (content.includes('@inquirer/prompts')) { + offenders.push(`${filePath}: @inquirer/prompts`); + } + } + + assert.deepEqual(offenders, []); + }); +}); diff --git a/packages/cli/tests/utils/prompt.test.ts b/packages/cli/tests/utils/prompt.test.ts new file mode 100644 index 0000000..4f5c3e7 --- /dev/null +++ b/packages/cli/tests/utils/prompt.test.ts @@ -0,0 +1,150 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { + isInteractiveSession, + selectPrompt, + spinnerTask, + introPrompt, + outroPrompt, + notePrompt, +} from '../../src/utils/prompt.js'; + +describe('prompt utils', () => { + let originalStdoutIsTTY: boolean | 
undefined; + let originalStdinIsTTY: boolean | undefined; + let originalCI: string | undefined; + + beforeEach(() => { + originalStdoutIsTTY = process.stdout.isTTY; + originalStdinIsTTY = process.stdin.isTTY; + originalCI = process.env['CI']; + }); + + afterEach(() => { + Object.defineProperty(process.stdout, 'isTTY', { + value: originalStdoutIsTTY, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: originalStdinIsTTY, + writable: true, + configurable: true, + }); + + if (originalCI === undefined) { + delete process.env['CI']; + } else { + process.env['CI'] = originalCI; + } + }); + + it('returns false when stdout is not a TTY', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + delete process.env['CI']; + + assert.equal(isInteractiveSession(), false); + }); + + it('returns false when CI is enabled even if both streams are TTY', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + process.env['CI'] = 'true'; + + assert.equal(isInteractiveSession(), false); + }); + + it('returns true for interactive non-CI session', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: true, + writable: true, + configurable: true, + }); + process.env['CI'] = 'false'; + + assert.equal(isInteractiveSession(), true); + }); + + it('throws a helpful message for prompts in non-interactive mode', async () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + 
Object.defineProperty(process.stdin, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + + await assert.rejects( + async () => selectPrompt({ message: 'Pick one', options: [{ label: 'one', value: 'one' }] }), + (error: unknown) => { + assert.ok(error instanceof Error); + assert.match(error.message, /non-interactive mode/); + return true; + }, + ); + }); + + it('runs spinner tasks without clack spinner in non-interactive mode', async () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + + const result = await spinnerTask({ + label: 'work', + task: async () => 'ok', + }); + + assert.equal(result, 'ok'); + }); + + it('intro/outro/note are no-ops in non-interactive mode', () => { + Object.defineProperty(process.stdout, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + Object.defineProperty(process.stdin, 'isTTY', { + value: false, + writable: true, + configurable: true, + }); + + assert.doesNotThrow(() => { + introPrompt('hello'); + notePrompt('body', 'title'); + outroPrompt('bye'); + }); + }); +}); diff --git a/packages/cli/tests/utils/registry.test.ts b/packages/cli/tests/utils/registry.test.ts new file mode 100644 index 0000000..52da7d3 --- /dev/null +++ b/packages/cli/tests/utils/registry.test.ts @@ -0,0 +1,143 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { searchRegistry, filterRegistryByTag } from '../../src/utils/registry.js'; +import type { Registry } from '../../src/utils/registry.js'; + +function makeRegistry(rules: Array<{ path: string; name: string; description: string; tags: string[] }>): Registry { + return { + version: 1, + generated_at: '2026-04-12T00:00:00Z', + rules: rules.map((r) => ({ + ...r, + version: '0.1.0', + scope: 'conventions', + size_bytes: 100, + })), + assets: { 
commands: [], templates: [], hooks: [], presets: [] }, + }; +} + +const SAMPLE_REGISTRY = makeRegistry([ + { path: 'typescript/strict', name: 'Strict TypeScript', description: 'Enforce strict TypeScript conventions', tags: ['typescript', 'strict', 'types'] }, + { path: 'typescript/react', name: 'React TypeScript', description: 'TypeScript patterns for React components', tags: ['typescript', 'react', 'frontend'] }, + { path: 'security/supabase-rls', name: 'Supabase RLS', description: 'Row-level security policies for Supabase', tags: ['security', 'supabase', 'rls'] }, + { path: 'testing/unit', name: 'Unit Testing', description: 'Best practices for unit testing', tags: ['testing', 'jest', 'vitest'] }, + { path: 'workflow/git', name: 'Git Workflow', description: 'Git branching and commit conventions', tags: ['git', 'workflow'] }, +]); + +describe('searchRegistry', () => { + it('returns all rules for empty query', () => { + const results = searchRegistry(SAMPLE_REGISTRY, ''); + assert.equal(results.length, 5); + }); + + it('returns all rules for whitespace-only query', () => { + const results = searchRegistry(SAMPLE_REGISTRY, ' '); + assert.equal(results.length, 5); + }); + + it('matches by name', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'Supabase'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'security/supabase-rls'); + }); + + it('matches by description', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'branching'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'workflow/git'); + }); + + it('matches by path', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'typescript/strict'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'typescript/strict'); + }); + + it('matches by tag', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'vitest'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'testing/unit'); + }); + + it('is case-insensitive', () 
=> { + const results = searchRegistry(SAMPLE_REGISTRY, 'SUPABASE'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'security/supabase-rls'); + }); + + it('supports multi-term AND search', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'typescript react'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'typescript/react'); + }); + + it('returns empty when no terms match', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'python django'); + assert.equal(results.length, 0); + }); + + it('returns empty when only one of AND terms matches', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'typescript django'); + assert.equal(results.length, 0); + }); + + it('matches multiple rules', () => { + const results = searchRegistry(SAMPLE_REGISTRY, 'typescript'); + assert.equal(results.length, 2); + const paths = results.map((r) => r.path); + assert.ok(paths.includes('typescript/strict')); + assert.ok(paths.includes('typescript/react')); + }); + + it('handles empty registry', () => { + const empty = makeRegistry([]); + const results = searchRegistry(empty, 'anything'); + assert.equal(results.length, 0); + }); +}); + +describe('filterRegistryByTag', () => { + it('returns all rules for empty tag', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, ''); + assert.equal(results.length, 5); + }); + + it('returns all rules for whitespace-only tag', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, ' '); + assert.equal(results.length, 5); + }); + + it('filters by exact tag match', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, 'security'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'security/supabase-rls'); + }); + + it('is case-insensitive', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, 'REACT'); + assert.equal(results.length, 1); + assert.equal(results[0]?.path, 'typescript/react'); + }); + + it('returns multiple matches', () => { + 
const results = filterRegistryByTag(SAMPLE_REGISTRY, 'typescript'); + assert.equal(results.length, 2); + }); + + it('returns empty for non-existent tag', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, 'python'); + assert.equal(results.length, 0); + }); + + it('does not partial-match tags', () => { + const results = filterRegistryByTag(SAMPLE_REGISTRY, 'type'); + assert.equal(results.length, 0); + }); + + it('handles empty registry', () => { + const empty = makeRegistry([]); + const results = filterRegistryByTag(empty, 'anything'); + assert.equal(results.length, 0); + }); +}); diff --git a/packages/cli/tests/utils/table.test.ts b/packages/cli/tests/utils/table.test.ts new file mode 100644 index 0000000..1334fad --- /dev/null +++ b/packages/cli/tests/utils/table.test.ts @@ -0,0 +1,36 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { renderTable } from '../../src/utils/table.js'; + +describe('renderTable', () => { + it('renders deterministic borders and rows', () => { + const output = renderTable( + ['bridge', 'generated', 'failed'], + [ + ['claude', '2', '0'], + ['gemini', '1', '1'], + ], + ); + + const expected = [ + ' ┌────────┬───────────┬────────┐', + ' │ bridge │ generated │ failed │', + ' ├────────┼───────────┼────────┤', + ' │ claude │ 2 │ 0 │', + ' │ gemini │ 1 │ 1 │', + ' └────────┴───────────┴────────┘', + ].join('\n'); + + assert.equal(output, expected); + }); + + it('returns empty string when headers are empty', () => { + assert.equal(renderTable([], [['value']]), ''); + }); + + it('honors minimum column widths', () => { + const output = renderTable(['id'], [['x']], [5]); + assert.ok(output.includes('│ id │')); + assert.ok(output.includes('│ x │')); + }); +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 2ee518a..bb6d862 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -14,18 +14,18 @@ importers: packages/cli: dependencies: - '@inquirer/prompts': - specifier: ^7.0.0 - version: 
7.10.1(@types/node@22.19.9) - chalk: - specifier: ^5.4.0 - version: 5.6.2 + '@clack/prompts': + specifier: ^0.9.0 + version: 0.9.1 chokidar: specifier: ^3.6.0 version: 3.6.0 commander: specifier: ^13.0.0 version: 13.1.0 + picocolors: + specifier: ^1.1.0 + version: 1.1.1 yaml: specifier: ^2.7.0 version: 2.8.2 @@ -98,54 +98,11 @@ packages: '@changesets/write@0.4.0': resolution: {integrity: sha512-CdTLvIOPiCNuH71pyDu3rA+Q0n65cmAbXnwWH84rKGiFumFzkmHNT8KHTMEchcxN+Kl8I54xGUhJ7l3E7X396Q==} - '@inquirer/ansi@1.0.2': - resolution: {integrity: sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==} - engines: {node: '>=18'} - - '@inquirer/checkbox@4.3.2': - resolution: {integrity: sha512-VXukHf0RR1doGe6Sm4F0Em7SWYLTHSsbGfJdS9Ja2bX5/D5uwVOEjr07cncLROdBvmnvCATYEWlHqYmXv2IlQA==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/confirm@5.1.21': - resolution: {integrity: sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/core@10.3.2': - resolution: {integrity: sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/editor@4.2.23': - resolution: {integrity: sha512-aLSROkEwirotxZ1pBaP8tugXRFCxW94gwrQLxXfrZsKkfjOYC1aRvAZuhpJOb5cu4IBTJdsCigUlf2iCOu4ZDQ==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true + '@clack/core@0.4.1': + resolution: {integrity: sha512-Pxhij4UXg8KSr7rPek6Zowm+5M22rbd2g1nfojHJkxp5YkFqiZ2+YLEM/XGVIzvGOcM0nqjIFxrpDwWRZYWYjA==} - '@inquirer/expand@4.0.23': - resolution: {integrity: 
sha512-nRzdOyFYnpeYTTR2qFwEVmIWypzdAx/sIkCMeTNTcflFOovfqUk+HcFhQQVBftAh9gmGrpFj6QcGEqrDMDOiew==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true + '@clack/prompts@0.9.1': + resolution: {integrity: sha512-JIpyaboYZeWYlyP0H+OoPPxd6nqueG/CmN6ixBiNFsIDHREevjIf0n0Ohh5gr5C8pEDknzgvz+pIJ8dMhzWIeg==} '@inquirer/external-editor@1.0.3': resolution: {integrity: sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==} @@ -156,82 +113,6 @@ packages: '@types/node': optional: true - '@inquirer/figures@1.0.15': - resolution: {integrity: sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==} - engines: {node: '>=18'} - - '@inquirer/input@4.3.1': - resolution: {integrity: sha512-kN0pAM4yPrLjJ1XJBjDxyfDduXOuQHrBB8aLDMueuwUGn+vNpF7Gq7TvyVxx8u4SHlFFj4trmj+a2cbpG4Jn1g==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/number@3.0.23': - resolution: {integrity: sha512-5Smv0OK7K0KUzUfYUXDXQc9jrf8OHo4ktlEayFlelCjwMXz0299Y8OrI+lj7i4gCBY15UObk76q0QtxjzFcFcg==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/password@4.0.23': - resolution: {integrity: sha512-zREJHjhT5vJBMZX/IUbyI9zVtVfOLiTO66MrF/3GFZYZ7T4YILW5MSkEYHceSii/KtRk+4i3RE7E1CUXA2jHcA==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/prompts@7.10.1': - resolution: {integrity: sha512-Dx/y9bCQcXLI5ooQ5KyvA4FTgeo2jYj/7plWfV5Ak5wDPKQZgudKez2ixyfz7tKXzcJciTxqLeK7R9HItwiByg==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/rawlist@4.1.11': - resolution: {integrity: 
sha512-+LLQB8XGr3I5LZN/GuAHo+GpDJegQwuPARLChlMICNdwW7OwV2izlCSCxN6cqpL0sMXmbKbFcItJgdQq5EBXTw==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/search@3.2.2': - resolution: {integrity: sha512-p2bvRfENXCZdWF/U2BXvnSI9h+tuA8iNqtUKb9UWbmLYCRQxd8WkvwWvYn+3NgYaNwdUkHytJMGG4MMLucI1kA==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/select@4.4.2': - resolution: {integrity: sha512-l4xMuJo55MAe+N7Qr4rX90vypFwCajSakx59qe/tMaC1aEHWLyw68wF4o0A4SLAY4E0nd+Vt+EyskeDIqu1M6w==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - - '@inquirer/type@3.0.10': - resolution: {integrity: sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==} - engines: {node: '>=18'} - peerDependencies: - '@types/node': '>=18' - peerDependenciesMeta: - '@types/node': - optional: true - '@manypkg/find-root@1.1.0': resolution: {integrity: sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA==} @@ -264,10 +145,6 @@ packages: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} engines: {node: '>=8'} - ansi-styles@4.3.0: - resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} - engines: {node: '>=8'} - anymatch@3.1.3: resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} engines: {node: '>= 8'} @@ -294,10 +171,6 @@ packages: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} - chalk@5.6.2: - resolution: {integrity: 
sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==} - engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - chardet@2.1.1: resolution: {integrity: sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==} @@ -309,17 +182,6 @@ packages: resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} engines: {node: '>=8'} - cli-width@4.1.0: - resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} - engines: {node: '>= 12'} - - color-convert@2.0.1: - resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} - engines: {node: '>=7.0.0'} - - color-name@1.1.4: - resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - commander@13.1.0: resolution: {integrity: sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==} engines: {node: '>=18'} @@ -336,9 +198,6 @@ packages: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} - emoji-regex@8.0.0: - resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - enquirer@2.4.1: resolution: {integrity: sha512-rRqJg/6gd538VHvR3PSrdRBb/1Vy2YfzHqzvbhGIQpDRKIa4FgV/54b5Q1xYSxOOwKvjXweS26E0Q+nAMwp2pQ==} engines: {node: '>=8.6'} @@ -410,10 +269,6 @@ packages: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} - is-fullwidth-code-point@3.0.0: - resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} - engines: {node: '>=8'} - is-glob@4.0.3: resolution: {integrity: 
sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} @@ -463,10 +318,6 @@ packages: resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} engines: {node: '>=4'} - mute-stream@2.0.0: - resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==} - engines: {node: ^18.17.0 || >=20.5.0} - normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} @@ -570,6 +421,9 @@ packages: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} + sisteransi@1.0.5: + resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -580,10 +434,6 @@ packages: sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - string-width@4.2.3: - resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} - engines: {node: '>=8'} - strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} @@ -617,19 +467,11 @@ packages: engines: {node: '>= 8'} hasBin: true - wrap-ansi@6.2.0: - resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} - engines: {node: '>=8'} - yaml@2.8.2: resolution: {integrity: 
sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==} engines: {node: '>= 14.6'} hasBin: true - yoctocolors-cjs@2.1.3: - resolution: {integrity: sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==} - engines: {node: '>=18'} - snapshots: '@babel/runtime@7.28.6': {} @@ -778,53 +620,16 @@ snapshots: human-id: 4.1.3 prettier: 2.8.8 - '@inquirer/ansi@1.0.2': {} - - '@inquirer/checkbox@4.3.2(@types/node@22.19.9)': + '@clack/core@0.4.1': dependencies: - '@inquirer/ansi': 1.0.2 - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/figures': 1.0.15 - '@inquirer/type': 3.0.10(@types/node@22.19.9) - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/confirm@5.1.21(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/core@10.3.2(@types/node@22.19.9)': - dependencies: - '@inquirer/ansi': 1.0.2 - '@inquirer/figures': 1.0.15 - '@inquirer/type': 3.0.10(@types/node@22.19.9) - cli-width: 4.1.0 - mute-stream: 2.0.0 - signal-exit: 4.1.0 - wrap-ansi: 6.2.0 - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/editor@4.2.23(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/external-editor': 1.0.3(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 + picocolors: 1.1.1 + sisteransi: 1.0.5 - '@inquirer/expand@4.0.23(@types/node@22.19.9)': + '@clack/prompts@0.9.1': dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 + '@clack/core': 0.4.1 + picocolors: 1.1.1 + sisteransi: 1.0.5 '@inquirer/external-editor@1.0.3(@types/node@22.19.9)': 
dependencies: @@ -833,76 +638,6 @@ snapshots: optionalDependencies: '@types/node': 22.19.9 - '@inquirer/figures@1.0.15': {} - - '@inquirer/input@4.3.1(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/number@3.0.23(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/password@4.0.23(@types/node@22.19.9)': - dependencies: - '@inquirer/ansi': 1.0.2 - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/prompts@7.10.1(@types/node@22.19.9)': - dependencies: - '@inquirer/checkbox': 4.3.2(@types/node@22.19.9) - '@inquirer/confirm': 5.1.21(@types/node@22.19.9) - '@inquirer/editor': 4.2.23(@types/node@22.19.9) - '@inquirer/expand': 4.0.23(@types/node@22.19.9) - '@inquirer/input': 4.3.1(@types/node@22.19.9) - '@inquirer/number': 3.0.23(@types/node@22.19.9) - '@inquirer/password': 4.0.23(@types/node@22.19.9) - '@inquirer/rawlist': 4.1.11(@types/node@22.19.9) - '@inquirer/search': 3.2.2(@types/node@22.19.9) - '@inquirer/select': 4.4.2(@types/node@22.19.9) - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/rawlist@4.1.11(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/type': 3.0.10(@types/node@22.19.9) - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/search@3.2.2(@types/node@22.19.9)': - dependencies: - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/figures': 1.0.15 - '@inquirer/type': 3.0.10(@types/node@22.19.9) - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/select@4.4.2(@types/node@22.19.9)': - dependencies: - 
'@inquirer/ansi': 1.0.2 - '@inquirer/core': 10.3.2(@types/node@22.19.9) - '@inquirer/figures': 1.0.15 - '@inquirer/type': 3.0.10(@types/node@22.19.9) - yoctocolors-cjs: 2.1.3 - optionalDependencies: - '@types/node': 22.19.9 - - '@inquirer/type@3.0.10(@types/node@22.19.9)': - optionalDependencies: - '@types/node': 22.19.9 - '@manypkg/find-root@1.1.0': dependencies: '@babel/runtime': 7.28.6 @@ -941,10 +676,6 @@ snapshots: ansi-regex@5.0.1: {} - ansi-styles@4.3.0: - dependencies: - color-convert: 2.0.1 - anymatch@3.1.3: dependencies: normalize-path: 3.0.0 @@ -968,8 +699,6 @@ snapshots: dependencies: fill-range: 7.1.1 - chalk@5.6.2: {} - chardet@2.1.1: {} chokidar@3.6.0: @@ -986,14 +715,6 @@ snapshots: ci-info@3.9.0: {} - cli-width@4.1.0: {} - - color-convert@2.0.1: - dependencies: - color-name: 1.1.4 - - color-name@1.1.4: {} - commander@13.1.0: {} cross-spawn@7.0.6: @@ -1008,8 +729,6 @@ snapshots: dependencies: path-type: 4.0.0 - emoji-regex@8.0.0: {} - enquirer@2.4.1: dependencies: ansi-colors: 4.1.3 @@ -1084,8 +803,6 @@ snapshots: is-extglob@2.1.1: {} - is-fullwidth-code-point@3.0.0: {} - is-glob@4.0.3: dependencies: is-extglob: 2.1.1 @@ -1128,8 +845,6 @@ snapshots: mri@1.2.0: {} - mute-stream@2.0.0: {} - normalize-path@3.0.0: {} outdent@0.5.0: {} @@ -1203,6 +918,8 @@ snapshots: signal-exit@4.1.0: {} + sisteransi@1.0.5: {} + slash@3.0.0: {} spawndamnit@3.0.1: @@ -1212,12 +929,6 @@ snapshots: sprintf-js@1.0.3: {} - string-width@4.2.3: - dependencies: - emoji-regex: 8.0.0 - is-fullwidth-code-point: 3.0.0 - strip-ansi: 6.0.1 - strip-ansi@6.0.1: dependencies: ansi-regex: 5.0.1 @@ -1240,12 +951,4 @@ snapshots: dependencies: isexe: 2.0.0 - wrap-ansi@6.2.0: - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - yaml@2.8.2: {} - - yoctocolors-cjs@2.1.3: {} diff --git a/scripts/generate-registry.js b/scripts/generate-registry.js new file mode 100644 index 0000000..157edbf --- /dev/null +++ b/scripts/generate-registry.js @@ -0,0 +1,146 @@ 
+#!/usr/bin/env node + +import { readdir, readFile, stat, writeFile } from 'node:fs/promises'; +import { join, relative } from 'node:path'; + +const ROOT_DIR = process.cwd(); +const RULES_DIR = join(ROOT_DIR, 'content', 'rules'); +const OUTPUT_PATH = join(ROOT_DIR, 'content', 'registry.json'); + +function parseScalar(value) { + const trimmed = value.trim(); + + if ((trimmed.startsWith('"') && trimmed.endsWith('"')) || (trimmed.startsWith("'") && trimmed.endsWith("'"))) { + return trimmed.slice(1, -1); + } + + return trimmed; +} + +function parseTags(raw) { + const value = raw.trim(); + if (!value.startsWith('[') || !value.endsWith(']')) { + return []; + } + + const inner = value.slice(1, -1).trim(); + if (inner.length === 0) { + return []; + } + + return inner + .split(',') + .map((entry) => parseScalar(entry)) + .filter((entry) => entry.length > 0); +} + +function parseFrontmatter(markdown) { + const match = /^---\n([\s\S]*?)\n---/.exec(markdown); + if (!match || !match[1]) { + return null; + } + + const metadata = {}; + for (const line of match[1].split('\n')) { + const trimmed = line.trim(); + if (trimmed.length === 0 || trimmed.startsWith('#')) { + continue; + } + + const separatorIndex = trimmed.indexOf(':'); + if (separatorIndex < 1) { + continue; + } + + const key = trimmed.slice(0, separatorIndex).trim(); + const rawValue = trimmed.slice(separatorIndex + 1).trim(); + + if (key === 'tags') { + metadata.tags = parseTags(rawValue); + continue; + } + + metadata[key] = parseScalar(rawValue); + } + + return metadata; +} + +async function collectMarkdownFiles(dir) { + const entries = await readdir(dir, { withFileTypes: true }); + const files = []; + + for (const entry of entries) { + const absolutePath = join(dir, entry.name); + + if (entry.isDirectory()) { + const nested = await collectMarkdownFiles(absolutePath); + files.push(...nested); + continue; + } + + if (entry.isFile() && entry.name.endsWith('.md')) { + files.push(absolutePath); + } + } + + return files; 
+} + +function normalizeRulePath(absolutePath) { + const relativePath = relative(RULES_DIR, absolutePath).replaceAll('\\', '/'); + return relativePath.replace(/\.md$/, ''); +} + +async function buildRegistry() { + const markdownFiles = await collectMarkdownFiles(RULES_DIR); + const rules = []; + + for (const filePath of markdownFiles) { + if (filePath.endsWith('/README.md')) { + continue; + } + + const markdown = await readFile(filePath, 'utf-8'); + const frontmatter = parseFrontmatter(markdown); + if (!frontmatter) { + continue; + } + + const fileStats = await stat(filePath); + rules.push({ + path: normalizeRulePath(filePath), + name: typeof frontmatter.name === 'string' ? frontmatter.name : '', + description: typeof frontmatter.description === 'string' ? frontmatter.description : '', + version: typeof frontmatter.version === 'string' ? frontmatter.version : '', + scope: typeof frontmatter.scope === 'string' ? frontmatter.scope : '', + tags: Array.isArray(frontmatter.tags) ? frontmatter.tags : [], + size_bytes: fileStats.size, + }); + } + + rules.sort((a, b) => a.path.localeCompare(b.path)); + + return { + version: 1, + generated_at: new Date().toISOString(), + rules, + assets: { + commands: [], + templates: [], + hooks: [], + presets: [], + }, + }; +} + +async function main() { + const registry = await buildRegistry(); + await writeFile(OUTPUT_PATH, `${JSON.stringify(registry, null, 2)}\n`, 'utf-8'); + console.log(`Generated ${registry.rules.length} rules in content/registry.json`); +} + +main().catch((error) => { + console.error(error instanceof Error ? error.message : String(error)); + process.exitCode = 1; +});