diff --git a/CLAUDE.md b/CLAUDE.md index 456c5aa..b76c5ae 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -31,7 +31,8 @@ pnpm dev # dev mode - `docs/internal/CLI_SPEC_v0.2.md` → v0.2 specification (COMPLETE) - `docs/internal/CLI_SPEC_v0.2.1.md` → v0.2.1 UX polish specification (COMPLETE) - `docs/internal/DOCS_SPEC.md` → Mintlify documentation spec (COMPLETE) -- `docs/internal/WATCH_SPEC.md` → v0.3 watch mode specification (ACTIVE — implement this) +- `docs/internal/WATCH_SPEC.md` → v0.3 watch mode specification (COMPLETE) +- `docs/internal/PULL_SPEC.md` → v0.4 Pull rules specification (IMPLEMENT THIS) - `docs/internal/DECISIONS.md` → accepted decisions (source of truth if conflict) - `docs/internal/` is gitignored — internal specs not published diff --git a/content/rules/README.md b/content/rules/README.md new file mode 100644 index 0000000..2aeb35c --- /dev/null +++ b/content/rules/README.md @@ -0,0 +1,49 @@ +# Official Rules + +Rules for AI coding agents, distributed via `devw pull`. + +## Available Rules + +| Rule | Category | Description | Command | +|------|----------|-------------|---------| +| `typescript/strict` | TypeScript | Strict TypeScript conventions | `devw pull typescript/strict` | +| `javascript/react` | JavaScript | React conventions and best practices | `devw pull javascript/react` | +| `javascript/nextjs` | JavaScript | Next.js App Router patterns and RSC | `devw pull javascript/nextjs` | +| `css/tailwind` | CSS | Utility-first Tailwind conventions | `devw pull css/tailwind` | +| `testing/vitest` | Testing | Vitest testing patterns | `devw pull testing/vitest` | +| `security/supabase-rls` | Security | Supabase RLS enforcement | `devw pull security/supabase-rls` | + +## Usage + +```bash +# List all available rules +devw pull --list + +# Pull a specific rule +devw pull typescript/strict + +# Preview without writing +devw pull typescript/strict --dry-run + +# Force overwrite +devw pull typescript/strict --force +``` + +## Rule Format + +Each rule file 
uses YAML frontmatter and Markdown bullets: + +```markdown +--- +name: rule-name +description: "Short description" +version: "0.1.0" +scope: conventions +tags: [tag1, tag2] +--- + +## Section + +- Rule text as a bullet. + Continuation indented. +``` diff --git a/content/rules/css/tailwind.md b/content/rules/css/tailwind.md new file mode 100644 index 0000000..b28292d --- /dev/null +++ b/content/rules/css/tailwind.md @@ -0,0 +1,28 @@ +--- +name: tailwind +description: "Utility-first Tailwind CSS conventions and design tokens" +version: "0.1.0" +scope: conventions +tags: [tailwind, css, styling] +--- + +## Utilities + +- Use Tailwind utility classes for all styling. Do not write + custom CSS unless absolutely necessary (e.g. complex + animations or third-party overrides). + +- Avoid `@apply` in CSS files. Extract reusable patterns into + React components instead of creating CSS abstractions. + +- Keep className strings readable. Break long class lists across + multiple lines and group related utilities together. + +## Design Tokens + +- Use Tailwind's design tokens (spacing, colors, typography) + from the theme config. Avoid arbitrary values like + `w-[137px]`; prefer the closest token. + +- Extend the theme in `tailwind.config` for project-specific + tokens. Do not hardcode colors or spacing outside the config. diff --git a/content/rules/javascript/nextjs.md b/content/rules/javascript/nextjs.md new file mode 100644 index 0000000..e30571d --- /dev/null +++ b/content/rules/javascript/nextjs.md @@ -0,0 +1,39 @@ +--- +name: nextjs +description: "Next.js App Router patterns and React Server Components" +version: "0.1.0" +scope: architecture +tags: [nextjs, react, app-router, rsc] +--- + +## Server Components + +- Minimize `"use client"` directives. Default to Server Components. + Only add `"use client"` when the component needs browser APIs, + event handlers, or React hooks that require client state. 
+ +- Fetch data in Server Components or server actions, not in + client components with `useEffect`. Use React Suspense for + loading states. + +- Keep Server Components free of side effects. Data fetching + and rendering only; mutations belong in server actions. + +## Routing + +- Follow the App Router file conventions: `page.tsx`, `layout.tsx`, + `loading.tsx`, `error.tsx`, `not-found.tsx`. Do not create custom + routing abstractions. + +- Use route groups `(group)` to organize routes without affecting + the URL structure. Use parallel routes and intercepting routes + when needed. + +## Server Actions + +- Prefer server actions for form submissions and data mutations. + Define them with `"use server"` in a separate file or at the + top of an async function. + +- Validate all inputs in server actions. Never trust data coming + from the client even in server-side code. diff --git a/content/rules/javascript/react.md b/content/rules/javascript/react.md new file mode 100644 index 0000000..5b583a8 --- /dev/null +++ b/content/rules/javascript/react.md @@ -0,0 +1,43 @@ +--- +name: react +description: "React conventions and best practices for AI coding agents" +version: "0.1.0" +scope: conventions +tags: [react, frontend, components, hooks] +--- + +## Components + +- Always use named exports. Never use default exports. + This applies to all files: components, utilities, hooks, and types. + +- Use PascalCase for component names and their files + (`UserProfile.tsx`). Use camelCase for hook files prefixed + with `use` (`useAuth.ts`). + +- Prefer composition over prop drilling. Use children, + render props, or context for shared behavior rather than + deeply nested prop chains. + +- Colocate related files: component, hook, utils, and types + in the same feature folder. + +## Hooks + +- Follow the Rules of Hooks: only call hooks at the top level, + never inside conditions or loops. Custom hooks must start + with `use`. + +- Extract complex logic into custom hooks. 
A component should + primarily handle rendering; business logic belongs in hooks. + +- Use `useMemo` and `useCallback` only when there is a measured + performance problem. Premature memoization adds complexity. + +## Styling + +- Avoid inline styles. Use CSS modules, Tailwind classes, + or styled-components for styling. + +- Keep className logic simple. Extract complex conditional + classes into a helper or use a utility like `clsx`. diff --git a/content/rules/security/supabase-rls.md b/content/rules/security/supabase-rls.md new file mode 100644 index 0000000..76058a4 --- /dev/null +++ b/content/rules/security/supabase-rls.md @@ -0,0 +1,30 @@ +--- +name: supabase-rls +description: "Supabase Row-Level Security enforcement and auth patterns" +version: "0.1.0" +scope: security +tags: [supabase, rls, security, database] +--- + +## RLS Policies + +- Every new table must have RLS policies before merging. + Enable RLS with `ALTER TABLE ... ENABLE ROW LEVEL SECURITY` + and create at least one policy per operation (SELECT, INSERT, + UPDATE, DELETE) as needed. + +- Always use `auth.uid()` in RLS policies to scope data to + the authenticated user. Never rely on client-provided + user IDs in queries. + +- Test RLS policies in isolation. Write SQL tests that verify + access is denied for unauthorized users before merging. + +## Auth Keys + +- Never expose the `service_role` key to the client. + Use the anon key in browser code and the `service_role` key + only in server-side or admin contexts. + +- Store Supabase keys in environment variables. Never hardcode + keys in source files or commit them to version control. 
diff --git a/content/rules/testing/vitest.md b/content/rules/testing/vitest.md new file mode 100644 index 0000000..aa4266d --- /dev/null +++ b/content/rules/testing/vitest.md @@ -0,0 +1,38 @@ +--- +name: vitest +description: "Vitest testing patterns and best practices" +version: "0.1.0" +scope: testing +tags: [vitest, testing, unit-tests] +--- + +## Test Structure + +- Use descriptive test names that explain the expected behavior. + Follow the pattern: `should [expected behavior] when [condition]`. + +- Structure tests with Arrange-Act-Assert (AAA) pattern. + Separate setup, execution, and verification into distinct + sections for readability. + +- Group related tests with `describe` blocks. One `describe` + per function or feature under test. + +## Assertions + +- Test behavior, not implementation details. Avoid asserting + on internal state, private methods, or specific function calls + unless testing integration points. + +- Prefer specific assertions (`toEqual`, `toContain`, `toThrow`) + over generic ones (`toBeTruthy`). Specific assertions give + better failure messages. + +## Mocking + +- Only mock at system boundaries: network requests, databases, + file system, and third-party services. Do not mock internal + modules or utility functions. + +- Use `vi.fn()` for function spies and `vi.mock()` for module + mocks. Restore all mocks after each test with `afterEach`. diff --git a/content/rules/typescript/strict.md b/content/rules/typescript/strict.md new file mode 100644 index 0000000..9bf8847 --- /dev/null +++ b/content/rules/typescript/strict.md @@ -0,0 +1,37 @@ +--- +name: strict +description: "Strict TypeScript conventions for professional codebases" +version: "0.1.0" +scope: conventions +tags: [typescript, strict, types] +--- + +## Type Safety + +- Never use `any`. Use `unknown` when the type is truly unknown, + then narrow with type guards. + +- Always declare explicit return types on exported functions. 
+ Inferred types are fine for internal/private functions. + +- Never use non-null assertion (`!`). Handle null/undefined explicitly + with optional chaining, nullish coalescing, or type guards. + +## Types and Enums + +- Prefer union types over enums. + Use `as const` objects when you need runtime values. + +- Prefer `interface` for object shapes that may be extended. + Use `type` for unions, intersections, and mapped types. + +- Use `satisfies` to validate object literals against a type + while preserving the narrowest inferred type. + +## Generics + +- Name generic parameters descriptively when the meaning + is not obvious. Prefer `TItem` over `T` in complex signatures. + +- Constrain generic parameters with `extends` to communicate + the expected shape and catch misuse at compile time. diff --git a/packages/cli/src/bridges/types.ts b/packages/cli/src/bridges/types.ts index 9afe8e9..2891a24 100644 --- a/packages/cli/src/bridges/types.ts +++ b/packages/cli/src/bridges/types.ts @@ -6,6 +6,13 @@ export interface Rule { tags?: string[]; enabled: boolean; sourceBlock?: string; + source?: string; +} + +export interface PulledEntry { + path: string; + version: string; + pulled_at: string; } export interface ProjectConfig { @@ -17,6 +24,7 @@ export interface ProjectConfig { tools: string[]; mode: 'copy' | 'link'; blocks: string[]; + pulled: PulledEntry[]; } export interface Bridge { diff --git a/packages/cli/src/commands/doctor.ts b/packages/cli/src/commands/doctor.ts index 58e6505..52b3074 100644 --- a/packages/cli/src/commands/doctor.ts +++ b/packages/cli/src/commands/doctor.ts @@ -9,7 +9,7 @@ import { cursorBridge } from '../bridges/cursor.js'; import { geminiBridge } from '../bridges/gemini.js'; import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; -import type { Bridge, ProjectConfig, Rule } from '../bridges/types.js'; +import type { Bridge, ProjectConfig, PulledEntry, Rule } from '../bridges/types.js'; 
import { fileExists } from '../utils/fs.js'; import { isValidScope } from '../core/schema.js'; import * as ui from '../utils/ui.js'; @@ -184,6 +184,32 @@ export async function checkSymlinks(cwd: string, config: ProjectConfig): Promise return { passed: true, message: 'Symlinks are valid' }; } +export async function checkPulledFilesExist(cwd: string, pulled: PulledEntry[]): Promise { + if (pulled.length === 0) { + return { passed: true, message: 'Pulled files check skipped (no pulled rules)', skipped: true }; + } + + const missing: string[] = []; + + for (const entry of pulled) { + const slug = entry.path.replace(/\//g, '-'); + const fileName = `pulled-${slug}.yml`; + const filePath = join(cwd, '.dwf', 'rules', fileName); + if (!(await fileExists(filePath))) { + missing.push(fileName); + } + } + + if (missing.length > 0) { + return { + passed: false, + message: `Missing pulled rule files: ${missing.join(', ')}`, + }; + } + + return { passed: true, message: `Pulled rule files exist (${String(pulled.length)} entries)` }; +} + export async function checkHashSync(cwd: string, rules: Rule[]): Promise { const storedHash = await readStoredHash(cwd); if (storedHash === null) { @@ -273,7 +299,11 @@ async function runDoctor(): Promise { const symlinkResult = await checkSymlinks(cwd, config!); results.push(symlinkResult); - // Check 8: Hash sync (conditional on compiled files existing) + // Check 8: Pulled files exist + const pulledResult = await checkPulledFilesExist(cwd, config!.pulled); + results.push(pulledResult); + + // Check 9: Hash sync (conditional on compiled files existing) const hashResult = await checkHashSync(cwd, rules); results.push(hashResult); diff --git a/packages/cli/src/commands/list.ts b/packages/cli/src/commands/list.ts index 9e5a17d..6e8d94c 100644 --- a/packages/cli/src/commands/list.ts +++ b/packages/cli/src/commands/list.ts @@ -46,7 +46,14 @@ async function listRules(): Promise { for (const rule of active) { const severityIcon = rule.severity === 
'error' ? chalk.red(ICONS.error) : rule.severity === 'warning' ? chalk.yellow(ICONS.warn) : chalk.dim(ICONS.dot); const severityColor = rule.severity === 'error' ? chalk.red : rule.severity === 'warning' ? chalk.yellow : chalk.dim; - const source = rule.sourceBlock ? chalk.dim(` [${rule.sourceBlock}]`) : ''; + let source = ''; + if (rule.source) { + source = chalk.dim(` (pulled: ${rule.source})`); + } else if (rule.sourceBlock) { + source = chalk.dim(` [${rule.sourceBlock}]`); + } else { + source = chalk.dim(` ${ICONS.arrow} manual`); + } console.log(` ${severityIcon} ${severityColor(rule.severity.padEnd(8))}${chalk.cyan(rule.scope.padEnd(15))}${rule.id}${source}`); } } diff --git a/packages/cli/src/commands/pull.ts b/packages/cli/src/commands/pull.ts new file mode 100644 index 0000000..8b47d7d --- /dev/null +++ b/packages/cli/src/commands/pull.ts @@ -0,0 +1,327 @@ +import { join } from 'node:path'; +import { readFile, writeFile, mkdir } from 'node:fs/promises'; +import type { Command } from 'commander'; +import chalk from 'chalk'; +import { stringify, parse } from 'yaml'; +import { confirm } from '@inquirer/prompts'; +import { fetchRawContent, listDirectory } from '../utils/github.js'; +import { convert } from '../core/converter.js'; +import { fileExists } from '../utils/fs.js'; +import * as cache from '../utils/cache.js'; +import * as ui from '../utils/ui.js'; +import { ICONS } from '../utils/ui.js'; +import type { PulledEntry } from '../bridges/types.js'; + +const KEBAB_RE = /^[a-z0-9]+(?:-[a-z0-9]+)*$/; + +export interface PullOptions { + list?: boolean; + noCompile?: boolean; + force?: boolean; + dryRun?: boolean; +} + +export function validateInput(input: string): { category: string; name: string } | null { + const parts = input.split('/'); + if (parts.length !== 2) return null; + + const category = parts[0]; + const name = parts[1]; + if (!category || !name) return null; + if (!KEBAB_RE.test(category) || !KEBAB_RE.test(name)) return null; + + return { 
category, name }; +} + +interface CachedRegistry { + categories: Array<{ + name: string; + rules: Array<{ name: string; description: string }>; + }>; +} + +async function runList(categoryFilter: string | undefined): Promise { + const cwd = process.cwd(); + + // Try cache first + const cached = await cache.getFromDisk(cwd, 'registry'); + + let registry: CachedRegistry; + + if (cached) { + registry = cached; + } else { + ui.info('Fetching available rules from GitHub...'); + ui.newline(); + + let topLevel; + try { + topLevel = await listDirectory(); + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + ui.error(`Could not fetch rule registry: ${msg}`); + process.exitCode = 1; + return; + } + + const categories: CachedRegistry['categories'] = []; + + for (const entry of topLevel) { + if (entry.type !== 'dir') continue; + + try { + const files = await listDirectory(entry.name); + const rules: Array<{ name: string; description: string }> = []; + + for (const file of files) { + if (file.type !== 'file') continue; + try { + const content = await fetchRawContent(`${entry.name}/${file.name}`); + const fmMatch = /^---\n([\s\S]*?)\n---/.exec(content); + if (fmMatch?.[1]) { + const fm = parse(fmMatch[1]) as Record; + const description = typeof fm['description'] === 'string' ? fm['description'] : ''; + rules.push({ name: file.name, description }); + } + } catch { + rules.push({ name: file.name, description: '' }); + } + } + + if (rules.length > 0) { + categories.push({ name: entry.name, rules }); + } + } catch { + // Skip categories that fail to list + } + } + + registry = { categories }; + await cache.set(cwd, 'registry', registry); + } + + // Filter if category specified + const displayCategories = categoryFilter + ? 
registry.categories.filter((c) => c.name === categoryFilter) + : registry.categories; + + if (displayCategories.length === 0) { + if (categoryFilter) { + ui.warn(`Category "${categoryFilter}" not found`); + } else { + ui.warn('No rules available'); + } + return; + } + + ui.header('Available rules'); + ui.newline(); + + for (const category of displayCategories) { + console.log(` ${chalk.cyan(`${category.name}/`)}`); + for (const rule of category.rules) { + const desc = rule.description ? chalk.dim(` ${rule.description}`) : ''; + console.log(` ${chalk.white(rule.name.padEnd(20))}${desc}`); + } + ui.newline(); + } + + console.log(` ${chalk.dim(`Pull a rule: devw pull /`)}`); +} + +export function generateYamlOutput( + category: string, + name: string, + result: ReturnType, + pulledAt: string, +): string { + const source = `${category}/${name}`; + const githubUrl = `https://github.com/gpolanco/dev-workflows/blob/main/content/rules/${source}.md`; + + const header = [ + `# Pulled from: ${source} (v${result.version})`, + `# Source: ${githubUrl}`, + `# Do not edit manually — changes will be overwritten on next pull.`, + '', + ].join('\n'); + + const doc = { + source: { + registry: 'dev-workflows', + path: source, + version: result.version, + pulled_at: pulledAt, + }, + scope: result.scope, + rules: result.rules.map((r) => ({ + id: r.id, + severity: r.severity, + content: r.content, + tags: r.tags, + source: r.source, + })), + }; + + return header + stringify(doc, { lineWidth: 0 }); +} + +export async function updateConfig(cwd: string, entry: PulledEntry): Promise { + const configPath = join(cwd, '.dwf', 'config.yml'); + const raw = await readFile(configPath, 'utf-8'); + const doc = parse(raw) as Record; + + const pulled = Array.isArray(doc['pulled']) ? 
(doc['pulled'] as PulledEntry[]) : []; + + const existingIdx = pulled.findIndex((p) => p.path === entry.path); + if (existingIdx >= 0) { + pulled[existingIdx] = entry; + } else { + pulled.push(entry); + } + + doc['pulled'] = pulled; + await writeFile(configPath, stringify(doc, { lineWidth: 0 }), 'utf-8'); +} + +async function runPull(ruleArg: string | undefined, options: PullOptions): Promise { + if (options.list) { + await runList(ruleArg); + return; + } + + if (!ruleArg) { + ui.error('Specify a rule to pull', 'Usage: devw pull /'); + process.exitCode = 1; + return; + } + + const cwd = process.cwd(); + + // Validate .dwf/ exists + if (!(await fileExists(join(cwd, '.dwf', 'config.yml')))) { + ui.error('.dwf/config.yml not found', 'Run devw init to initialize the project'); + process.exitCode = 1; + return; + } + + // Validate input format + const parsed = validateInput(ruleArg); + if (!parsed) { + ui.error( + `Invalid rule path "${ruleArg}"`, + 'Format: / — both must be kebab-case (e.g., typescript/strict)', + ); + process.exitCode = 1; + return; + } + + const { category, name } = parsed; + const source = `${category}/${name}`; + const fileName = `pulled-${category}-${name}.yml`; + const filePath = join(cwd, '.dwf', 'rules', fileName); + + // Download markdown + ui.info(`Downloading ${source}...`); + + let markdown: string; + try { + markdown = await fetchRawContent(source); + } catch (err) { + const msg = err instanceof Error ? err.message : String(err); + ui.error(msg); + process.exitCode = 1; + return; + } + + // Convert + let result: ReturnType; + try { + result = convert(markdown, category, name); + } catch (err) { + const msg = err instanceof Error ? 
err.message : String(err); + ui.error(`Conversion failed: ${msg}`); + process.exitCode = 1; + return; + } + + // Check existing file + if (await fileExists(filePath)) { + try { + const existingRaw = await readFile(filePath, 'utf-8'); + const existingDoc = parse(existingRaw) as Record; + const existingSource = existingDoc['source'] as Record | undefined; + const existingVersion = typeof existingSource?.['version'] === 'string' ? existingSource['version'] : ''; + + if (existingVersion === result.version) { + ui.success(`Already up to date (${source} v${result.version})`); + return; + } + + if (!options.force) { + ui.newline(); + ui.info(`${source} already exists locally (v${existingVersion} ${ICONS.arrow} v${result.version})`); + try { + const shouldOverwrite = await confirm({ + message: 'Overwrite with new version?', + default: true, + }); + if (!shouldOverwrite) { + ui.info('Pull cancelled'); + return; + } + } catch { + ui.info('Pull cancelled'); + return; + } + } + } catch { + // Can't parse existing file — overwrite + } + } + + const pulledAt = new Date().toISOString(); + const yamlOutput = generateYamlOutput(category, name, result, pulledAt); + + // Dry run + if (options.dryRun) { + ui.newline(); + ui.header('Dry run — would write:'); + ui.newline(); + console.log(chalk.dim(` ${fileName}`)); + ui.newline(); + console.log(yamlOutput); + return; + } + + // Write file + await mkdir(join(cwd, '.dwf', 'rules'), { recursive: true }); + await writeFile(filePath, yamlOutput, 'utf-8'); + + // Update config + const entry: PulledEntry = { + path: source, + version: result.version, + pulled_at: pulledAt, + }; + await updateConfig(cwd, entry); + + ui.success(`Pulled ${source} (${String(result.rules.length)} rules)`); + + // Auto-compile + if (!options.noCompile) { + const { runCompileFromAdd } = await import('./compile.js'); + await runCompileFromAdd(); + } +} + +export function registerPullCommand(program: Command): void { + program + .command('pull') + .argument('[rule]', 
'Rule path: / (e.g., typescript/strict)') + .description('Pull rules from the official dev-workflows registry') + .option('--list', 'List available rules') + .option('--no-compile', 'Skip auto-compile after pull') + .option('--force', 'Overwrite without asking') + .option('--dry-run', 'Show output without writing files') + .action((rule: string | undefined, options: PullOptions) => runPull(rule, options)); +} diff --git a/packages/cli/src/core/converter.ts b/packages/cli/src/core/converter.ts new file mode 100644 index 0000000..31eeeab --- /dev/null +++ b/packages/cli/src/core/converter.ts @@ -0,0 +1,164 @@ +import { parse as parseYaml } from 'yaml'; +import { isValidScope } from './schema.js'; + +export interface ConvertedRule { + id: string; + scope: string; + severity: 'error'; + content: string; + tags: string[]; + source: string; +} + +export interface ConvertResult { + name: string; + description: string; + version: string; + scope: string; + tags: string[]; + rules: ConvertedRule[]; +} + +interface Frontmatter { + name: string; + description: string; + version: string; + scope: string; + tags: string[]; +} + +function parseFrontmatter(markdown: string): { frontmatter: Frontmatter; body: string } { + const fmRegex = /^---\n([\s\S]*?)\n---\n?([\s\S]*)$/; + const match = fmRegex.exec(markdown); + + if (!match?.[1]) { + throw new Error('Invalid rule file: missing YAML frontmatter'); + } + + const raw: unknown = parseYaml(match[1]); + if (!raw || typeof raw !== 'object') { + throw new Error('Invalid rule file: frontmatter is not an object'); + } + + const fm = raw as Record; + + const name = typeof fm['name'] === 'string' ? fm['name'] : ''; + const description = typeof fm['description'] === 'string' ? fm['description'] : ''; + const version = typeof fm['version'] === 'string' ? fm['version'] : '0.1.0'; + const scope = typeof fm['scope'] === 'string' ? fm['scope'] : 'conventions'; + const tags = Array.isArray(fm['tags']) + ? 
fm['tags'].filter((t): t is string => typeof t === 'string') + : []; + + if (!name) { + throw new Error('Invalid rule file: missing "name" in frontmatter'); + } + if (!description) { + throw new Error('Invalid rule file: missing "description" in frontmatter'); + } + if (!isValidScope(scope)) { + throw new Error(`Invalid rule file: invalid scope "${scope}"`); + } + + return { + frontmatter: { name, description, version, scope, tags }, + body: match[2] ?? '', + }; +} + +function toKebabCase(text: string): string { + return text + .toLowerCase() + .replace(/[^a-z0-9\s-]/g, '') + .replace(/\s+/g, '-') + .replace(/-+/g, '-') + .replace(/^-|-$/g, ''); +} + +interface ParsedBullet { + heading: string | null; + content: string; + index: number; +} + +function parseBullets(body: string): ParsedBullet[] { + const lines = body.split('\n'); + const bullets: ParsedBullet[] = []; + let currentHeading: string | null = null; + let headingIndex = 0; + let currentBullet: string | null = null; + + function flushBullet(): void { + if (currentBullet !== null) { + bullets.push({ + heading: currentHeading, + content: currentBullet.trimEnd(), + index: headingIndex++, + }); + currentBullet = null; + } + } + + for (const line of lines) { + if (line.startsWith('## ')) { + flushBullet(); + currentHeading = toKebabCase(line.slice(3).trim()); + headingIndex = 0; + continue; + } + + if (line.startsWith('- ')) { + flushBullet(); + currentBullet = line.slice(2); + continue; + } + + // Indented continuation (2+ spaces) of a bullet + if (currentBullet !== null && line.length > 0 && /^\s{2,}/.test(line)) { + currentBullet += '\n' + line.trimStart(); + continue; + } + + // Empty line — might be between bullets, keep accumulating + if (currentBullet !== null && line.trim() === '') { + continue; + } + + // Non-bullet, non-heading content — flush if we had a bullet + if (currentBullet !== null && line.trim() !== '') { + flushBullet(); + } + } + + flushBullet(); + return bullets; +} + +export function 
convert(markdown: string, category: string, name: string): ConvertResult { + const { frontmatter, body } = parseFrontmatter(markdown); + const bullets = parseBullets(body); + const source = `${category}/${name}`; + + const rules: ConvertedRule[] = bullets.map((bullet) => { + const headingPart = bullet.heading ? `-${bullet.heading}` : ''; + const id = `pulled-${category}-${name}${headingPart}-${String(bullet.index)}`; + + return { + id, + scope: frontmatter.scope, + severity: 'error' as const, + content: bullet.content, + tags: [...frontmatter.tags], + source, + }; + }); + + return { + name: frontmatter.name, + description: frontmatter.description, + version: frontmatter.version, + scope: frontmatter.scope, + tags: frontmatter.tags, + rules, + }; +} diff --git a/packages/cli/src/core/parser.ts b/packages/cli/src/core/parser.ts index 540f553..e09910b 100644 --- a/packages/cli/src/core/parser.ts +++ b/packages/cli/src/core/parser.ts @@ -1,7 +1,7 @@ import { readFile, readdir } from 'node:fs/promises'; import { join } from 'node:path'; import { parse } from 'yaml'; -import type { Rule, ProjectConfig } from '../bridges/types.js'; +import type { Rule, ProjectConfig, PulledEntry } from '../bridges/types.js'; import { isValidScope } from './schema.js'; interface RawRule { @@ -11,6 +11,7 @@ interface RawRule { tags?: string[]; enabled?: boolean; sourceBlock?: string; + source?: string; } interface RawRuleFile { @@ -55,12 +56,25 @@ export async function readConfig(cwd: string): Promise { ? blocksRaw.filter((b): b is string => typeof b === 'string') : []; + const pulledRaw = doc['pulled']; + const pulled: PulledEntry[] = Array.isArray(pulledRaw) + ? pulledRaw + .filter((p): p is Record => p !== null && typeof p === 'object') + .map((p) => ({ + path: typeof p['path'] === 'string' ? p['path'] : '', + version: typeof p['version'] === 'string' ? p['version'] : '', + pulled_at: typeof p['pulled_at'] === 'string' ? 
p['pulled_at'] : '', + })) + .filter((p) => p.path !== '') + : []; + return { version, project: { name: projectName, description: projectDescription }, tools, mode: modeRaw, blocks, + pulled, }; } @@ -80,6 +94,7 @@ function normalizeRule(raw: RawRule, scope: string): Rule | null { tags: raw.tags, enabled, sourceBlock: raw.sourceBlock, + source: raw.source, }; } diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index f688cdd..755d4b9 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -8,6 +8,7 @@ import { registerRemoveCommand } from './commands/remove.js'; import { registerListCommand } from './commands/list.js'; import { registerExplainCommand } from './commands/explain.js'; import { registerWatchCommand } from './commands/watch.js'; +import { registerPullCommand } from './commands/pull.js'; const require = createRequire(import.meta.url); const pkg = require('../package.json') as { version: string }; @@ -27,6 +28,7 @@ registerRemoveCommand(program); registerListCommand(program); registerExplainCommand(program); registerWatchCommand(program); +registerPullCommand(program); program.parse(); diff --git a/packages/cli/src/utils/cache.ts b/packages/cli/src/utils/cache.ts new file mode 100644 index 0000000..411d733 --- /dev/null +++ b/packages/cli/src/utils/cache.ts @@ -0,0 +1,55 @@ +import { readFile, writeFile, mkdir } from 'node:fs/promises'; +import { join, dirname } from 'node:path'; + +const TTL_MS = 3_600_000; // 1 hour + +interface CacheEntry { + data: T; + timestamp: number; +} + +type CacheStore = Record>; + +function getCachePath(cwd: string): string { + return join(cwd, '.dwf', '.cache', 'registry.json'); +} + +async function readStore(cwd: string): Promise { + try { + const raw = await readFile(getCachePath(cwd), 'utf-8'); + const parsed: unknown = JSON.parse(raw); + if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) { + return parsed as CacheStore; + } + return {}; + } catch { + return {}; + } +} + 
+async function writeStore(cwd: string, store: CacheStore): Promise { + const cachePath = getCachePath(cwd); + await mkdir(dirname(cachePath), { recursive: true }); + await writeFile(cachePath, JSON.stringify(store, null, 2), 'utf-8'); +} + +export function get(_cwd: string, key: string, store: CacheStore): T | null { + const entry = store[key] as CacheEntry | undefined; + if (!entry) return null; + + const age = Date.now() - entry.timestamp; + if (age > TTL_MS) return null; + + return entry.data; +} + +export async function getFromDisk(cwd: string, key: string): Promise { + const store = await readStore(cwd); + return get(cwd, key, store); +} + +export async function set(cwd: string, key: string, value: T): Promise { + const store = await readStore(cwd); + store[key] = { data: value, timestamp: Date.now() }; + await writeStore(cwd, store); +} diff --git a/packages/cli/src/utils/github.ts b/packages/cli/src/utils/github.ts new file mode 100644 index 0000000..fcf75cf --- /dev/null +++ b/packages/cli/src/utils/github.ts @@ -0,0 +1,91 @@ +const BRANCH = 'main'; +const RAW_BASE = `https://raw.githubusercontent.com/gpolanco/dev-workflows/${BRANCH}/content/rules`; +const API_BASE = 'https://api.github.com/repos/gpolanco/dev-workflows/contents/content/rules'; + +export class GitHubError extends Error { + constructor( + message: string, + public readonly statusCode: number, + ) { + super(message); + this.name = 'GitHubError'; + } +} + +function handleResponseError(status: number, path: string): never { + if (status === 404) { + throw new GitHubError(`Rule not found: ${path}`, 404); + } + if (status === 403) { + throw new GitHubError( + 'GitHub API rate limit exceeded. 
Try again later or set a GITHUB_TOKEN environment variable.',
+      403,
+    );
+  }
+  throw new GitHubError(`GitHub request failed (HTTP ${String(status)})`, status);
+}
+
+export async function fetchRawContent(path: string): Promise<string> {
+  const url = `${RAW_BASE}/${path}.md`;
+
+  let response: Response;
+  try {
+    response = await fetch(url);
+  } catch (err) {
+    const msg = err instanceof Error ? err.message : String(err);
+    throw new GitHubError(`Network error fetching rule: ${msg}`, 0);
+  }
+
+  if (!response.ok) {
+    handleResponseError(response.status, path);
+  }
+
+  return response.text();
+}
+
+interface GitHubContentsEntry {
+  name: string;
+  type: string;
+}
+
+export interface DirectoryEntry {
+  name: string;
+  type: 'file' | 'dir';
+}
+
+export async function listDirectory(path?: string): Promise<DirectoryEntry[]> {
+  const segments = [API_BASE];
+  if (path) segments.push(path);
+  const base = segments.join('/');
+  const url = `${base}?ref=${BRANCH}`;
+
+  const headers: Record<string, string> = {
+    Accept: 'application/vnd.github.v3+json',
+  };
+
+  const token = process.env['GITHUB_TOKEN'];
+  if (token) {
+    headers['Authorization'] = `Bearer ${token}`;
+  }
+
+  let response: Response;
+  try {
+    response = await fetch(url, { headers });
+  } catch (err) {
+    const msg = err instanceof Error ? err.message : String(err);
+    throw new GitHubError(`Network error listing directory: ${msg}`, 0);
+  }
+
+  if (!response.ok) {
+    handleResponseError(response.status, path ?? 'rules');
+  }
+
+  const data = (await response.json()) as GitHubContentsEntry[];
+
+  return data
+    .filter((entry) => entry.type === 'file' || entry.type === 'dir')
+    .map((entry) => ({
+      name: entry.name.replace(/\.md$/, ''),
+      type: entry.type === 'dir' ?
('dir' as const) : ('file' as const), + })); +} diff --git a/packages/cli/tests/bridges/copilot.test.ts b/packages/cli/tests/bridges/copilot.test.ts index cbef146..0653a2b 100644 --- a/packages/cli/tests/bridges/copilot.test.ts +++ b/packages/cli/tests/bridges/copilot.test.ts @@ -20,6 +20,7 @@ const CONFIG: ProjectConfig = { tools: ['copilot'], mode: 'copy', blocks: [], + pulled: [], }; describe('copilotBridge', () => { diff --git a/packages/cli/tests/bridges/windsurf.test.ts b/packages/cli/tests/bridges/windsurf.test.ts index 7de692b..cf83a09 100644 --- a/packages/cli/tests/bridges/windsurf.test.ts +++ b/packages/cli/tests/bridges/windsurf.test.ts @@ -20,6 +20,7 @@ const CONFIG: ProjectConfig = { tools: ['windsurf'], mode: 'copy', blocks: [], + pulled: [], }; describe('windsurfBridge', () => { diff --git a/packages/cli/tests/commands/doctor.edge.test.ts b/packages/cli/tests/commands/doctor.edge.test.ts new file mode 100644 index 0000000..aaefdce --- /dev/null +++ b/packages/cli/tests/commands/doctor.edge.test.ts @@ -0,0 +1,55 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, mkdir, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { checkPulledFilesExist } from '../../src/commands/doctor.js'; +import type { PulledEntry } from '../../src/bridges/types.js'; + +describe('doctor edge cases', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = await mkdtemp(join(tmpdir(), 'dwf-doctor-edge-')); + await mkdir(join(tempDir, '.dwf', 'rules'), { recursive: true }); + }); + + afterEach(async () => { + await rm(tempDir, { recursive: true, force: true }); + }); + + describe('checkPulledFilesExist', () => { + it('reports only missing files when some exist and some do not', async () => { + // Create one pulled file but not the other + await writeFile(join(tempDir, '.dwf', 'rules', 'pulled-typescript-strict.yml'), 
'scope: conventions\nrules: []\n', 'utf-8'); + + const pulled: PulledEntry[] = [ + { path: 'typescript/strict', version: '0.1.0', pulled_at: '2026-01-01T00:00:00Z' }, + { path: 'javascript/react', version: '0.1.0', pulled_at: '2026-01-01T00:00:00Z' }, + ]; + + const result = await checkPulledFilesExist(tempDir, pulled); + assert.equal(result.passed, false); + assert.ok(result.message.includes('pulled-javascript-react.yml')); + assert.ok(!result.message.includes('pulled-typescript-strict.yml')); + }); + + it('converts slashes in path to hyphens for filename check', async () => { + // Path "typescript/strict" → filename "pulled-typescript-strict.yml" + await writeFile(join(tempDir, '.dwf', 'rules', 'pulled-typescript-strict.yml'), 'scope: conventions\nrules: []\n', 'utf-8'); + + const pulled: PulledEntry[] = [ + { path: 'typescript/strict', version: '0.1.0', pulled_at: '2026-01-01T00:00:00Z' }, + ]; + + const result = await checkPulledFilesExist(tempDir, pulled); + assert.equal(result.passed, true); + }); + + it('returns skipped: true when pulled array is empty', async () => { + const result = await checkPulledFilesExist(tempDir, []); + assert.equal(result.passed, true); + assert.equal(result.skipped, true); + }); + }); +}); diff --git a/packages/cli/tests/commands/doctor.test.ts b/packages/cli/tests/commands/doctor.test.ts index 99b859e..3ca858d 100644 --- a/packages/cli/tests/commands/doctor.test.ts +++ b/packages/cli/tests/commands/doctor.test.ts @@ -212,6 +212,7 @@ blocks: [] tools: ['claude', 'cursor', 'gemini'], mode: 'copy', blocks: [], + pulled: [], }; const result = checkBridgesAvailable(config); @@ -225,6 +226,7 @@ blocks: [] tools: ['claude', 'windsurf', 'copilot'], mode: 'copy', blocks: [], + pulled: [], }; const result = checkBridgesAvailable(config); @@ -238,6 +240,7 @@ blocks: [] tools: ['claude', 'vscode'], mode: 'copy', blocks: [], + pulled: [], }; const result = checkBridgesAvailable(config); @@ -254,6 +257,7 @@ blocks: [] tools: ['claude'], mode: 
'copy', blocks: [], + pulled: [], }; const result = await checkSymlinks(tmpDir, config); @@ -268,6 +272,7 @@ blocks: [] tools: ['claude'], mode: 'link', blocks: [], + pulled: [], }; // Create a target file and a symlink pointing to it @@ -288,6 +293,7 @@ blocks: [] tools: ['claude'], mode: 'link', blocks: [], + pulled: [], }; // Create a symlink pointing to a non-existent target diff --git a/packages/cli/tests/commands/pull.edge.test.ts b/packages/cli/tests/commands/pull.edge.test.ts new file mode 100644 index 0000000..8fb0997 --- /dev/null +++ b/packages/cli/tests/commands/pull.edge.test.ts @@ -0,0 +1,215 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, mkdir, writeFile, readFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { parse } from 'yaml'; +import { validateInput, generateYamlOutput, updateConfig } from '../../src/commands/pull.js'; +import { convert } from '../../src/core/converter.js'; +import type { PulledEntry } from '../../src/bridges/types.js'; + +const MOCK_MARKDOWN = `--- +name: strict +description: "Strict TypeScript conventions" +version: "0.2.0" +scope: conventions +tags: [typescript, strict] +--- + +## Type Safety + +- Never use any. Use unknown instead. 
+`;
+
+async function createProject(dir: string, extraYaml = ''): Promise<void> {
+  await mkdir(join(dir, '.dwf', 'rules'), { recursive: true });
+  await writeFile(
+    join(dir, '.dwf', 'config.yml'),
+    `version: "0.1"
+project:
+  name: test-project
+tools:
+  - claude
+mode: copy
+blocks: []
+${extraYaml}`,
+    'utf-8',
+  );
+}
+
+describe('pull edge cases', () => {
+  describe('validateInput', () => {
+    it('returns null for empty string', () => {
+      assert.equal(validateInput(''), null);
+    });
+
+    it('returns null for single segment (no slash)', () => {
+      assert.equal(validateInput('typescript'), null);
+    });
+
+    it('returns null for triple segment (two slashes)', () => {
+      assert.equal(validateInput('a/b/c'), null);
+    });
+
+    it('returns null for trailing slash', () => {
+      assert.equal(validateInput('typescript/'), null);
+    });
+
+    it('returns null for leading slash', () => {
+      assert.equal(validateInput('/strict'), null);
+    });
+
+    it('returns null for empty segments (double slash)', () => {
+      assert.equal(validateInput('/'), null);
+    });
+
+    it('returns null for uppercase input', () => {
+      assert.equal(validateInput('TypeScript/Strict'), null);
+    });
+
+    it('returns null for underscores', () => {
+      assert.equal(validateInput('type_script/my_rule'), null);
+    });
+
+    it('returns null for dots in segment', () => {
+      assert.equal(validateInput('type.script/rule'), null);
+    });
+
+    it('returns null for spaces in segment', () => {
+      assert.equal(validateInput('type script/rule'), null);
+    });
+
+    it('returns valid result for segments with only numbers', () => {
+      const result = validateInput('123/456');
+      assert.deepEqual(result, { category: '123', name: '456' });
+    });
+  });
+
+  describe('updateConfig', () => {
+    let tempDir: string;
+
+    beforeEach(async () => {
+      tempDir = await mkdtemp(join(tmpdir(), 'dwf-pull-edge-'));
+    });
+
+    afterEach(async () => {
+      await rm(tempDir, { recursive: true, force: true });
+    });
+
+    it('preserves existing entries when adding a new one',
async () => {
+      await createProject(tempDir, `pulled:
+  - path: "javascript/react"
+    version: "0.1.0"
+    pulled_at: "2026-01-01T00:00:00Z"`);
+
+      const newEntry: PulledEntry = {
+        path: 'typescript/strict',
+        version: '0.2.0',
+        pulled_at: '2026-02-11T00:00:00Z',
+      };
+
+      await updateConfig(tempDir, newEntry);
+
+      const raw = await readFile(join(tempDir, '.dwf', 'config.yml'), 'utf-8');
+      const doc = parse(raw) as Record<string, unknown>;
+      const pulled = doc['pulled'] as PulledEntry[];
+      assert.equal(pulled.length, 2);
+      assert.equal(pulled[0]?.path, 'javascript/react');
+      assert.equal(pulled[1]?.path, 'typescript/strict');
+    });
+
+    it('replaces entry with same path (no duplication)', async () => {
+      await createProject(tempDir, `pulled:
+  - path: "typescript/strict"
+    version: "0.1.0"
+    pulled_at: "2026-01-01T00:00:00Z"`);
+
+      const updatedEntry: PulledEntry = {
+        path: 'typescript/strict',
+        version: '0.3.0',
+        pulled_at: '2026-02-11T00:00:00Z',
+      };
+
+      await updateConfig(tempDir, updatedEntry);
+
+      const raw = await readFile(join(tempDir, '.dwf', 'config.yml'), 'utf-8');
+      const doc = parse(raw) as Record<string, unknown>;
+      const pulled = doc['pulled'] as PulledEntry[];
+      assert.equal(pulled.length, 1);
+      assert.equal(pulled[0]?.version, '0.3.0');
+    });
+
+    it('creates pulled array when config has no pulled key', async () => {
+      await createProject(tempDir);
+
+      const entry: PulledEntry = {
+        path: 'typescript/strict',
+        version: '0.1.0',
+        pulled_at: '2026-02-11T00:00:00Z',
+      };
+
+      await updateConfig(tempDir, entry);
+
+      const raw = await readFile(join(tempDir, '.dwf', 'config.yml'), 'utf-8');
+      const doc = parse(raw) as Record<string, unknown>;
+      const pulled = doc['pulled'] as PulledEntry[];
+      assert.ok(Array.isArray(pulled));
+      assert.equal(pulled.length, 1);
+      assert.equal(pulled[0]?.path, 'typescript/strict');
+    });
+  });
+
+  describe('generateYamlOutput', () => {
+    it('header contains path, version, and GitHub URL', () => {
+      const result = convert(MOCK_MARKDOWN, 'typescript', 'strict');
+      const
output = generateYamlOutput('typescript', 'strict', result, '2026-02-11T00:00:00Z');
+
+      assert.ok(output.includes('typescript/strict'));
+      assert.ok(output.includes('v0.2.0'));
+      assert.ok(output.includes('https://github.com/gpolanco/dev-workflows/blob/main/content/rules/typescript/strict.md'));
+    });
+
+    it('roundtrip YAML: generate → parse → validate fields', () => {
+      const result = convert(MOCK_MARKDOWN, 'typescript', 'strict');
+      const output = generateYamlOutput('typescript', 'strict', result, '2026-02-11T00:00:00Z');
+
+      // Strip comment header lines before parsing
+      const yamlBody = output.split('\n').filter((l) => !l.startsWith('#')).join('\n');
+      const doc = parse(yamlBody) as Record<string, unknown>;
+
+      const source = doc['source'] as Record<string, unknown>;
+      assert.equal(source['registry'], 'dev-workflows');
+      assert.equal(source['path'], 'typescript/strict');
+      assert.equal(source['version'], '0.2.0');
+      assert.equal(source['pulled_at'], '2026-02-11T00:00:00Z');
+
+      const rules = doc['rules'] as Array<Record<string, unknown>>;
+      assert.ok(rules.length > 0);
+      assert.equal(rules[0]?.['severity'], 'error');
+      assert.equal(rules[0]?.['source'], 'typescript/strict');
+    });
+
+    it('special characters in content survive YAML roundtrip', () => {
+      const specialMd = `---
+name: special
+description: "Special chars test"
+version: "0.1.0"
+scope: conventions
+tags: []
+---
+
+- Use backticks for \`code\`, colons: like this, and "quotes" in content.
+`;
+      const result = convert(specialMd, 'test', 'special');
+      const output = generateYamlOutput('test', 'special', result, '2026-02-11T00:00:00Z');
+
+      const yamlBody = output.split('\n').filter((l) => !l.startsWith('#')).join('\n');
+      const doc = parse(yamlBody) as Record<string, unknown>;
+      const rules = doc['rules'] as Array<Record<string, unknown>>;
+      const content = rules[0]?.['content'] as string;
+      assert.ok(content.includes('`code`'));
+      assert.ok(content.includes('colons:'));
+      assert.ok(content.includes('"quotes"'));
+    });
+  });
+});
diff --git a/packages/cli/tests/commands/pull.test.ts b/packages/cli/tests/commands/pull.test.ts
new file mode 100644
index 0000000..b687f9f
--- /dev/null
+++ b/packages/cli/tests/commands/pull.test.ts
@@ -0,0 +1,275 @@
+import { describe, it, beforeEach, afterEach } from 'node:test';
+import assert from 'node:assert/strict';
+import { mkdtemp, rm, mkdir, writeFile, readFile } from 'node:fs/promises';
+import { join } from 'node:path';
+import { tmpdir } from 'node:os';
+import { parse, stringify } from 'yaml';
+import { fileExists } from '../../src/utils/fs.js';
+import { convert } from '../../src/core/converter.js';
+import { readConfig } from '../../src/core/parser.js';
+import { readRules } from '../../src/core/parser.js';
+import type { PulledEntry } from '../../src/bridges/types.js';
+
+const MOCK_MARKDOWN = `---
+name: strict
+description: "Strict TypeScript conventions"
+version: "0.1.0"
+scope: conventions
+tags: [typescript, strict]
+---
+
+## Type Safety
+
+- Never use any. Use unknown instead.
+  Narrow with type guards.
+
+- Always declare explicit return types.
+`;
+
+async function createProject(dir: string): Promise<void> {
+  await mkdir(join(dir, '.dwf', 'rules'), { recursive: true });
+  await writeFile(
+    join(dir, '.dwf', 'config.yml'),
+    `version: "0.1"
+project:
+  name: test-project
+tools:
+  - claude
+mode: copy
+blocks: []
+`,
+    'utf-8',
+  );
+}
+
+describe('pull command integration', () => {
+  let tempDir: string;
+
+  beforeEach(async () => {
+    tempDir = await mkdtemp(join(tmpdir(), 'dwf-pull-test-'));
+  });
+
+  afterEach(async () => {
+    await rm(tempDir, { recursive: true, force: true });
+  });
+
+  describe('converter integration', () => {
+    it('converts markdown to correct YAML structure', () => {
+      const result = convert(MOCK_MARKDOWN, 'typescript', 'strict');
+
+      assert.equal(result.name, 'strict');
+      assert.equal(result.version, '0.1.0');
+      assert.equal(result.scope, 'conventions');
+      assert.equal(result.rules.length, 2);
+
+      const first = result.rules[0];
+      const second = result.rules[1];
+      assert.ok(first);
+      assert.ok(second);
+      assert.equal(first.id, 'pulled-typescript-strict-type-safety-0');
+      assert.equal(second.id, 'pulled-typescript-strict-type-safety-1');
+      assert.ok(first.content.includes('Never use any'));
+      assert.ok(first.content.includes('Narrow with type guards'));
+    });
+  });
+
+  describe('input validation', () => {
+    it('validates kebab-case correctly', () => {
+      const validKebab = /^[a-z0-9]+(?:-[a-z0-9]+)*$/;
+      assert.equal(validKebab.test('typescript'), true);
+      assert.equal(validKebab.test('TypeScript'), false);
+      assert.equal(validKebab.test('my-rule'), true);
+      assert.equal(validKebab.test('my_rule'), false);
+      assert.equal(validKebab.test(''), false);
+    });
+
+    it('rejects non-kebab-case segments', () => {
+      const validKebab = /^[a-z0-9]+(?:-[a-z0-9]+)*$/;
+      assert.equal(validKebab.test('TypeScript'), false);
+      assert.equal(validKebab.test('my_rule'), false);
+      assert.equal(validKebab.test('MY-RULE'), false);
+      assert.equal(validKebab.test('rule.name'), false);
+    });
+  });
+
+
describe('config update', () => {
+    it('adds pulled entry to config', async () => {
+      await createProject(tempDir);
+
+      const configPath = join(tempDir, '.dwf', 'config.yml');
+      const raw = await readFile(configPath, 'utf-8');
+      const doc = parse(raw) as Record<string, unknown>;
+
+      const pulled: PulledEntry[] = Array.isArray(doc['pulled']) ? doc['pulled'] as PulledEntry[] : [];
+      pulled.push({
+        path: 'typescript/strict',
+        version: '0.1.0',
+        pulled_at: '2026-02-11T00:00:00Z',
+      });
+      doc['pulled'] = pulled;
+
+      await writeFile(configPath, stringify(doc, { lineWidth: 0 }), 'utf-8');
+
+      const updated = parse(await readFile(configPath, 'utf-8')) as Record<string, unknown>;
+      const updatedPulled = updated['pulled'] as PulledEntry[];
+      assert.ok(updatedPulled);
+      assert.equal(updatedPulled.length, 1);
+      const entry = updatedPulled[0];
+      assert.ok(entry);
+      assert.equal(entry.path, 'typescript/strict');
+    });
+  });
+
+  describe('pulled file generation', () => {
+    it('generates valid YAML with source metadata', async () => {
+      await createProject(tempDir);
+
+      const result = convert(MOCK_MARKDOWN, 'typescript', 'strict');
+
+      const doc = {
+        source: {
+          registry: 'dev-workflows',
+          path: 'typescript/strict',
+          version: result.version,
+          pulled_at: '2026-02-11T00:00:00Z',
+        },
+        scope: result.scope,
+        rules: result.rules.map((r) => ({
+          id: r.id,
+          severity: r.severity,
+          content: r.content,
+          tags: r.tags,
+          source: r.source,
+        })),
+      };
+
+      const yaml = stringify(doc, { lineWidth: 0 });
+      const filePath = join(tempDir, '.dwf', 'rules', 'pulled-typescript-strict.yml');
+      await writeFile(filePath, yaml, 'utf-8');
+
+      assert.ok(await fileExists(filePath));
+
+      const parsed = parse(await readFile(filePath, 'utf-8')) as Record<string, unknown>;
+      const source = parsed['source'] as Record<string, unknown>;
+      assert.ok(source);
+      assert.equal(source['registry'], 'dev-workflows');
+      assert.equal(source['path'], 'typescript/strict');
+      assert.equal(source['version'], '0.1.0');
+
+      const rules = parsed['rules'] as Array<Record<string, unknown>>;
+      assert.ok(rules);
+
assert.equal(rules.length, 2);
+      const firstRule = rules[0];
+      assert.ok(firstRule);
+      assert.equal(firstRule['id'], 'pulled-typescript-strict-type-safety-0');
+      assert.equal(firstRule['source'], 'typescript/strict');
+    });
+  });
+
+  describe('already up to date', () => {
+    it('detects same version as already up to date', async () => {
+      await createProject(tempDir);
+
+      const existingDoc = {
+        source: {
+          registry: 'dev-workflows',
+          path: 'typescript/strict',
+          version: '0.1.0',
+          pulled_at: '2026-02-10T00:00:00Z',
+        },
+        scope: 'conventions',
+        rules: [],
+      };
+
+      const filePath = join(tempDir, '.dwf', 'rules', 'pulled-typescript-strict.yml');
+      await writeFile(filePath, stringify(existingDoc, { lineWidth: 0 }), 'utf-8');
+
+      const existing = parse(await readFile(filePath, 'utf-8')) as Record<string, unknown>;
+      const existingSource = existing['source'] as Record<string, unknown>;
+      assert.ok(existingSource);
+      assert.equal(existingSource['version'], '0.1.0');
+
+      const result = convert(MOCK_MARKDOWN, 'typescript', 'strict');
+      assert.equal(result.version, existingSource['version']);
+    });
+  });
+
+  describe('dry run', () => {
+    it('does not write files in dry run mode', async () => {
+      await createProject(tempDir);
+
+      const filePath = join(tempDir, '.dwf', 'rules', 'pulled-typescript-strict.yml');
+      assert.equal(await fileExists(filePath), false);
+    });
+  });
+
+  describe('parser reads pulled config', () => {
+    it('reads pulled entries from config.yml', async () => {
+      await createProject(tempDir);
+
+      const configPath = join(tempDir, '.dwf', 'config.yml');
+      await writeFile(
+        configPath,
+        `version: "0.1"
+project:
+  name: test-project
+tools:
+  - claude
+mode: copy
+blocks: []
+pulled:
+  - path: "typescript/strict"
+    version: "0.1.0"
+    pulled_at: "2026-02-11T00:00:00Z"
+`,
+        'utf-8',
+      );
+
+      const config = await readConfig(tempDir);
+      assert.equal(config.pulled.length, 1);
+      const entry = config.pulled[0];
+      assert.ok(entry);
+      assert.equal(entry.path, 'typescript/strict');
+
assert.equal(entry.version, '0.1.0');
+    });
+
+    it('defaults to empty pulled array when field is missing', async () => {
+      await createProject(tempDir);
+
+      const config = await readConfig(tempDir);
+      assert.deepEqual(config.pulled, []);
+    });
+  });
+
+  describe('readRules reads pulled files with source', () => {
+    it('loads rules with source field from pulled files', async () => {
+      await createProject(tempDir);
+
+      const pulledDoc = {
+        scope: 'conventions',
+        rules: [
+          {
+            id: 'pulled-typescript-strict-0',
+            severity: 'error',
+            content: 'Test rule content',
+            tags: ['typescript'],
+            source: 'typescript/strict',
+          },
+        ],
+      };
+
+      await writeFile(
+        join(tempDir, '.dwf', 'rules', 'pulled-typescript-strict.yml'),
+        stringify(pulledDoc, { lineWidth: 0 }),
+        'utf-8',
+      );
+
+      const rules = await readRules(tempDir);
+      assert.equal(rules.length, 1);
+      const first = rules[0];
+      assert.ok(first);
+      assert.equal(first.source, 'typescript/strict');
+      assert.equal(first.id, 'pulled-typescript-strict-0');
+    });
+  });
+});
diff --git a/packages/cli/tests/core/converter.edge.test.ts b/packages/cli/tests/core/converter.edge.test.ts
new file mode 100644
index 0000000..ad1dce0
--- /dev/null
+++ b/packages/cli/tests/core/converter.edge.test.ts
@@ -0,0 +1,226 @@
+import { describe, it } from 'node:test';
+import assert from 'node:assert/strict';
+import { convert } from '../../src/core/converter.js';
+
+function ruleAt(result: ReturnType<typeof convert>, index: number) {
+  const rule = result.rules[index];
+  if (!rule) throw new Error(`No rule at index ${String(index)}`);
+  return rule;
+}
+
+function md(frontmatter: string, body: string): string {
+  return `---\n${frontmatter}\n---\n${body}`;
+}
+
+const BASE_FM = `name: test
+description: "Test rules"
+version: "0.1.0"
+scope: conventions`;
+
+describe('converter edge cases', () => {
+  describe('empty / minimal body', () => {
+    it('returns 0 rules when body is empty (only frontmatter)', () => {
+      const result = convert(md(BASE_FM, ''), 'cat',
'name'); + assert.equal(result.rules.length, 0); + }); + + it('returns 0 rules when body has only headings without bullets', () => { + const result = convert(md(BASE_FM, '\n## Heading One\n\n## Heading Two\n'), 'cat', 'name'); + assert.equal(result.rules.length, 0); + }); + }); + + describe('frontmatter defaults and coercion', () => { + it('defaults version to "0.1.0" when version is a number', () => { + const fm = `name: test +description: "Test" +version: 1 +scope: conventions`; + const result = convert(md(fm, '\n- rule\n'), 'cat', 'name'); + assert.equal(result.version, '0.1.0'); + }); + + it('defaults tags to [] when tags is a string', () => { + const fm = `name: test +description: "Test" +version: "0.1.0" +scope: conventions +tags: "not-an-array"`; + const result = convert(md(fm, '\n- rule\n'), 'cat', 'name'); + assert.deepEqual(result.tags, []); + }); + + it('filters non-string items in tags array', () => { + const fm = `name: test +description: "Test" +version: "0.1.0" +scope: conventions +tags: [valid, 123, true]`; + const result = convert(md(fm, '\n- rule\n'), 'cat', 'name'); + assert.deepEqual(result.tags, ['valid']); + }); + }); + + describe('frontmatter error cases', () => { + it('throws "not an object" when frontmatter is a scalar string', () => { + const raw = `---\njust a string\n---\n- rule`; + assert.throws(() => convert(raw, 'cat', 'name'), /not an object/); + }); + + it('throws "missing name" when frontmatter is a YAML array', () => { + const raw = `---\n- item1\n- item2\n---\n- rule`; + assert.throws(() => convert(raw, 'cat', 'name'), /missing "name"/); + }); + }); + + describe('bullets before and after headings', () => { + it('assigns no heading segment to bullets before any heading', () => { + const body = ` +- orphan bullet + +## Section + +- section bullet +`; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + assert.equal(result.rules.length, 2); + assert.equal(ruleAt(result, 0).id, 'pulled-cat-name-0'); + 
assert.ok(!ruleAt(result, 0).id.includes('section')); + }); + + it('assigns correct IDs for mix of bullets before and after heading', () => { + const body = ` +- before heading + +## My Section + +- after heading +`; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + assert.equal(ruleAt(result, 0).id, 'pulled-cat-name-0'); + assert.equal(ruleAt(result, 1).id, 'pulled-cat-name-my-section-0'); + }); + }); + + describe('heading slug via toKebabCase', () => { + it('converts heading with special characters to kebab-case', () => { + const body = ` +## Type Safety & Best Practices! + +- rule +`; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + assert.equal(ruleAt(result, 0).id, 'pulled-cat-name-type-safety-best-practices-0'); + }); + + it('handles heading with leading/trailing special chars', () => { + const body = ` +## ---Leading Trailing--- + +- rule +`; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + assert.equal(ruleAt(result, 0).id, 'pulled-cat-name-leading-trailing-0'); + }); + + it('handles heading with numbers', () => { + const body = ` +## Rule 42 Tips + +- rule +`; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + assert.equal(ruleAt(result, 0).id, 'pulled-cat-name-rule-42-tips-0'); + }); + + it('collapses consecutive hyphens in heading slug', () => { + const body = ` +## Foo --- Bar + +- rule +`; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + assert.equal(ruleAt(result, 0).id, 'pulled-cat-name-foo-bar-0'); + }); + + it('handles heading made entirely of symbols (empty slug is falsy → no heading segment)', () => { + const body = ` +## @#$%^&* + +- rule +`; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + // toKebabCase("@#$%^&*") → "" (falsy) → headingPart = "" → no heading segment in ID + assert.equal(ruleAt(result, 0).id, 'pulled-cat-name-0'); + }); + }); + + describe('bullet edge cases', () => { + it('captures empty bullet content ("- " with trailing space, no text)', () => { + // 
Must be "- " (dash + space) to match startsWith('- ') + const raw = md(BASE_FM, '\n- \n'); + const result = convert(raw, 'cat', 'name'); + assert.equal(result.rules.length, 1); + assert.equal(ruleAt(result, 0).content, ''); + }); + + it('bare dash without space is not a bullet', () => { + const body = `\n-\n`; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + assert.equal(result.rules.length, 0); + }); + + it('h3 (###) does not act as heading separator', () => { + const body = ` +## Real Heading + +- bullet one + +### Not A Heading + +- bullet two +`; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + // ### flushes the current bullet (non-bullet line), but both bullets share the same h2 heading + assert.equal(ruleAt(result, 0).id, 'pulled-cat-name-real-heading-0'); + assert.equal(ruleAt(result, 1).id, 'pulled-cat-name-real-heading-1'); + }); + + it('flushes current bullet when non-bullet text appears after it', () => { + const body = ` +- bullet content +Some plain text paragraph +`; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + assert.equal(result.rules.length, 1); + assert.equal(ruleAt(result, 0).content, 'bullet content'); + }); + + it('\\r\\n line endings break frontmatter parsing (known limitation)', () => { + const raw = `---\r\n${BASE_FM}\r\n---\r\n\r\n## Section\r\n\r\n- bullet one\r\n- bullet two\r\n`; + // Frontmatter regex requires \n, so \r\n causes "missing YAML frontmatter" + assert.throws(() => convert(raw, 'cat', 'name'), /missing YAML frontmatter/); + }); + + it('\\r\\n in body only (\\n in frontmatter) extracts bullets with \\r trimmed', () => { + const body = '\r\n## Section\r\n\r\n- bullet one\r\n- bullet two\r\n'; + const result = convert(md(BASE_FM, body), 'cat', 'name'); + assert.equal(result.rules.length, 2); + }); + + it('multiple empty lines between bullets do not break accumulation', () => { + const body = ` +- first bullet + + +- second bullet + + + +- third bullet +`; + const result = 
convert(md(BASE_FM, body), 'cat', 'name');
+      assert.equal(result.rules.length, 3);
+    });
+  });
+});
diff --git a/packages/cli/tests/core/converter.test.ts b/packages/cli/tests/core/converter.test.ts
new file mode 100644
index 0000000..6979970
--- /dev/null
+++ b/packages/cli/tests/core/converter.test.ts
@@ -0,0 +1,196 @@
+import { describe, it } from 'node:test';
+import assert from 'node:assert/strict';
+import { convert } from '../../src/core/converter.js';
+
+function ruleAt(result: ReturnType<typeof convert>, index: number) {
+  const rule = result.rules[index];
+  if (!rule) throw new Error(`No rule at index ${String(index)}`);
+  return rule;
+}
+
+const VALID_MD = `---
+name: react
+description: "React conventions for AI coding agents"
+version: "0.1.0"
+scope: conventions
+tags: [react, frontend]
+---
+
+## Components
+
+- Always use named exports. Never use default exports.
+  This applies to all files: components, utilities, hooks, and types.
+
+- Use PascalCase for component files (\`UserProfile.tsx\`).
+
+## Hooks
+
+- Never call hooks conditionally. Always call hooks at the top
+  level of your component or custom hook.
+
+- Prefix custom hooks with \`use\` (\`useAuth.ts\`).
+
+## General
+
+- Colocate related files: component, hook, utils, and types
+  in the same feature folder.
+`; + +describe('converter', () => { + describe('frontmatter parsing', () => { + it('extracts name, description, version, scope, tags', () => { + const result = convert(VALID_MD, 'javascript', 'react'); + assert.equal(result.name, 'react'); + assert.equal(result.description, 'React conventions for AI coding agents'); + assert.equal(result.version, '0.1.0'); + assert.equal(result.scope, 'conventions'); + assert.deepEqual(result.tags, ['react', 'frontend']); + }); + + it('throws on missing frontmatter', () => { + assert.throws(() => convert('# No frontmatter', 'a', 'b'), /missing YAML frontmatter/); + }); + + it('throws on missing name', () => { + const md = `--- +description: "test" +version: "0.1.0" +scope: conventions +--- + +- rule +`; + assert.throws(() => convert(md, 'a', 'b'), /missing "name"/); + }); + + it('throws on missing description', () => { + const md = `--- +name: test +version: "0.1.0" +scope: conventions +--- + +- rule +`; + assert.throws(() => convert(md, 'a', 'b'), /missing "description"/); + }); + + it('throws on invalid scope', () => { + const md = `--- +name: test +description: "test" +version: "0.1.0" +scope: INVALID +--- + +- rule +`; + assert.throws(() => convert(md, 'a', 'b'), /invalid scope/); + }); + }); + + describe('bullet extraction', () => { + it('extracts first-level bullets as individual rules', () => { + const result = convert(VALID_MD, 'javascript', 'react'); + assert.equal(result.rules.length, 5); + }); + + it('handles indented continuation as part of the same rule', () => { + const result = convert(VALID_MD, 'javascript', 'react'); + const first = ruleAt(result, 0); + assert.ok(first.content.includes('Always use named exports')); + assert.ok(first.content.includes('This applies to all files')); + }); + + it('extracts single-line bullets correctly', () => { + const result = convert(VALID_MD, 'javascript', 'react'); + const second = ruleAt(result, 1); + assert.ok(second.content.includes('PascalCase')); + }); + }); + + describe('ID 
generation with headings', () => { + it('generates IDs with heading slug', () => { + const result = convert(VALID_MD, 'javascript', 'react'); + assert.equal(ruleAt(result, 0).id, 'pulled-javascript-react-components-0'); + assert.equal(ruleAt(result, 1).id, 'pulled-javascript-react-components-1'); + assert.equal(ruleAt(result, 2).id, 'pulled-javascript-react-hooks-0'); + assert.equal(ruleAt(result, 3).id, 'pulled-javascript-react-hooks-1'); + assert.equal(ruleAt(result, 4).id, 'pulled-javascript-react-general-0'); + }); + + it('resets index per heading', () => { + const result = convert(VALID_MD, 'javascript', 'react'); + assert.equal(ruleAt(result, 2).id, 'pulled-javascript-react-hooks-0'); + assert.equal(ruleAt(result, 4).id, 'pulled-javascript-react-general-0'); + }); + }); + + describe('ID generation without headings', () => { + it('generates IDs without heading segment when no headings exist', () => { + const md = `--- +name: simple +description: "Simple rules" +version: "0.1.0" +scope: conventions +--- + +- First rule content. + +- Second rule content. + +- Third rule content. +`; + const result = convert(md, 'typescript', 'simple'); + assert.equal(ruleAt(result, 0).id, 'pulled-typescript-simple-0'); + assert.equal(ruleAt(result, 1).id, 'pulled-typescript-simple-1'); + assert.equal(ruleAt(result, 2).id, 'pulled-typescript-simple-2'); + }); + }); + + describe('multiline bullets', () => { + it('handles multi-line continuation correctly', () => { + const md = `--- +name: multi +description: "Multiline test" +version: "0.1.0" +scope: conventions +--- + +- First line of rule. + Second line of rule. + Third line of rule. + +- Another rule. 
+`; + const result = convert(md, 'test', 'multi'); + assert.equal(result.rules.length, 2); + assert.ok(ruleAt(result, 0).content.includes('First line')); + assert.ok(ruleAt(result, 0).content.includes('Second line')); + assert.ok(ruleAt(result, 0).content.includes('Third line')); + }); + }); + + describe('rule properties', () => { + it('all rules have severity error', () => { + const result = convert(VALID_MD, 'javascript', 'react'); + for (const rule of result.rules) { + assert.equal(rule.severity, 'error'); + } + }); + + it('all rules inherit tags from frontmatter', () => { + const result = convert(VALID_MD, 'javascript', 'react'); + for (const rule of result.rules) { + assert.deepEqual(rule.tags, ['react', 'frontend']); + } + }); + + it('all rules have source set to category/name', () => { + const result = convert(VALID_MD, 'javascript', 'react'); + for (const rule of result.rules) { + assert.equal(rule.source, 'javascript/react'); + } + }); + }); +}); diff --git a/packages/cli/tests/core/parser.edge.test.ts b/packages/cli/tests/core/parser.edge.test.ts new file mode 100644 index 0000000..a01787f --- /dev/null +++ b/packages/cli/tests/core/parser.edge.test.ts @@ -0,0 +1,187 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, mkdir, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { stringify } from 'yaml'; +import { readConfig, readRules } from '../../src/core/parser.js'; + +const BASE_CONFIG = `version: "0.1" +project: + name: test-project +tools: + - claude +mode: copy +blocks: []`; + +async function createProject(dir: string, configYaml: string): Promise { + await mkdir(join(dir, '.dwf', 'rules'), { recursive: true }); + await writeFile(join(dir, '.dwf', 'config.yml'), configYaml, 'utf-8'); +} + +describe('parser edge cases', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = await 
mkdtemp(join(tmpdir(), 'dwf-parser-edge-')); + }); + + afterEach(async () => { + await rm(tempDir, { recursive: true, force: true }); + }); + + describe('readConfig pulled entries', () => { + it('filters entry without path', async () => { + await createProject(tempDir, `${BASE_CONFIG} +pulled: + - version: "0.1.0" + pulled_at: "2026-01-01T00:00:00Z" +`); + const config = await readConfig(tempDir); + assert.equal(config.pulled.length, 0); + }); + + it('filters null in pulled array', async () => { + await createProject(tempDir, `${BASE_CONFIG} +pulled: + - path: "typescript/strict" + version: "0.1.0" + pulled_at: "2026-01-01T00:00:00Z" + - null +`); + const config = await readConfig(tempDir); + assert.equal(config.pulled.length, 1); + }); + + it('defaults to [] when pulled is a string', async () => { + await createProject(tempDir, `${BASE_CONFIG} +pulled: "not-an-array" +`); + const config = await readConfig(tempDir); + assert.deepEqual(config.pulled, []); + }); + + it('defaults to [] when pulled is a number', async () => { + await createProject(tempDir, `${BASE_CONFIG} +pulled: 42 +`); + const config = await readConfig(tempDir); + assert.deepEqual(config.pulled, []); + }); + + it('keeps entry without version but with path (version defaults to empty)', async () => { + await createProject(tempDir, `${BASE_CONFIG} +pulled: + - path: "typescript/strict" + pulled_at: "2026-01-01T00:00:00Z" +`); + const config = await readConfig(tempDir); + assert.equal(config.pulled.length, 1); + assert.equal(config.pulled[0]?.version, ''); + }); + }); + + describe('normalizeRule via readRules', () => { + it('skips rule without id', async () => { + await createProject(tempDir, BASE_CONFIG); + const doc = { + scope: 'conventions', + rules: [{ severity: 'error', content: 'No id rule' }], + }; + await writeFile(join(tempDir, '.dwf', 'rules', 'test.yml'), stringify(doc), 'utf-8'); + + const rules = await readRules(tempDir); + assert.equal(rules.length, 0); + }); + + it('skips rule without 
content', async () => { + await createProject(tempDir, BASE_CONFIG); + const doc = { + scope: 'conventions', + rules: [{ id: 'no-content', severity: 'error' }], + }; + await writeFile(join(tempDir, '.dwf', 'rules', 'test.yml'), stringify(doc), 'utf-8'); + + const rules = await readRules(tempDir); + assert.equal(rules.length, 0); + }); + + it('skips rule with invalid severity', async () => { + await createProject(tempDir, BASE_CONFIG); + const doc = { + scope: 'conventions', + rules: [{ id: 'bad-sev', severity: 'critical', content: 'Rule content' }], + }; + await writeFile(join(tempDir, '.dwf', 'rules', 'test.yml'), stringify(doc), 'utf-8'); + + const rules = await readRules(tempDir); + assert.equal(rules.length, 0); + }); + + it('respects enabled: false explicitly', async () => { + await createProject(tempDir, BASE_CONFIG); + const doc = { + scope: 'conventions', + rules: [{ id: 'disabled-rule', severity: 'error', content: 'Disabled', enabled: false }], + }; + await writeFile(join(tempDir, '.dwf', 'rules', 'test.yml'), stringify(doc), 'utf-8'); + + const rules = await readRules(tempDir); + assert.equal(rules.length, 1); + assert.equal(rules[0]?.enabled, false); + }); + + it('trims trailing whitespace from content', async () => { + await createProject(tempDir, BASE_CONFIG); + const doc = { + scope: 'conventions', + rules: [{ id: 'trim-test', severity: 'error', content: 'Rule with trailing \n' }], + }; + await writeFile(join(tempDir, '.dwf', 'rules', 'test.yml'), stringify(doc), 'utf-8'); + + const rules = await readRules(tempDir); + assert.equal(rules.length, 1); + assert.equal(rules[0]?.content, 'Rule with trailing'); + }); + }); + + describe('readRules file handling', () => { + it('skips file without rules array', async () => { + await createProject(tempDir, BASE_CONFIG); + await writeFile( + join(tempDir, '.dwf', 'rules', 'no-rules.yml'), + `scope: conventions\ndata: something\n`, + 'utf-8', + ); + + const rules = await readRules(tempDir); + 
assert.equal(rules.length, 0); + }); + + it('throws on invalid YAML (no try/catch in readRules parse)', async () => { + await createProject(tempDir, BASE_CONFIG); + await writeFile( + join(tempDir, '.dwf', 'rules', 'broken.yml'), + ':\ninvalid: [yaml: {broken', + 'utf-8', + ); + + // readRules does parse() without try/catch — invalid YAML causes unhandled throw + await assert.rejects( + () => readRules(tempDir), + ); + }); + + it('skips rules with invalid scope and logs warning', async () => { + await createProject(tempDir, BASE_CONFIG); + const doc = { + scope: 'INVALID_SCOPE', + rules: [{ id: 'test', severity: 'error', content: 'Should be skipped' }], + }; + await writeFile(join(tempDir, '.dwf', 'rules', 'bad-scope.yml'), stringify(doc), 'utf-8'); + + const rules = await readRules(tempDir); + assert.equal(rules.length, 0); + }); + }); +}); diff --git a/packages/cli/tests/utils/cache.edge.test.ts b/packages/cli/tests/utils/cache.edge.test.ts new file mode 100644 index 0000000..34b83ad --- /dev/null +++ b/packages/cli/tests/utils/cache.edge.test.ts @@ -0,0 +1,102 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, mkdir, writeFile, readFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { get, set, getFromDisk } from '../../src/utils/cache.js'; + +describe('cache edge cases', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = await mkdtemp(join(tmpdir(), 'dwf-cache-edge-')); + await mkdir(join(tempDir, '.dwf', '.cache'), { recursive: true }); + }); + + afterEach(async () => { + await rm(tempDir, { recursive: true, force: true }); + }); + + describe('TTL expiration', () => { + it('get() returns null when entry timestamp is expired (>1h)', () => { + const store = { + 'old-key': { + data: 'stale value', + timestamp: Date.now() - 3_700_000, // > 1 hour ago + }, + }; + + const result = get(tempDir, 'old-key', 
store); + assert.equal(result, null); + }); + + it('getFromDisk returns null when disk entry is expired', async () => { + const store = { + 'expired-key': { + data: { foo: 'bar' }, + timestamp: Date.now() - 4_000_000, // expired + }, + }; + await writeFile( + join(tempDir, '.dwf', '.cache', 'registry.json'), + JSON.stringify(store), + 'utf-8', + ); + + const result = await getFromDisk<{ foo: string }>(tempDir, 'expired-key'); + assert.equal(result, null); + }); + }); + + describe('malformed store', () => { + it('readStore returns {} when cache file is a JSON array', async () => { + await writeFile( + join(tempDir, '.dwf', '.cache', 'registry.json'), + JSON.stringify([1, 2, 3]), + 'utf-8', + ); + + const result = await getFromDisk(tempDir, 'any-key'); + assert.equal(result, null); + }); + }); + + describe('directory creation', () => { + it('set() creates .dwf/.cache/ directory if it does not exist', async () => { + const freshDir = await mkdtemp(join(tmpdir(), 'dwf-cache-fresh-')); + // No .dwf/.cache directory exists + + await set(freshDir, 'new-key', 'new-value'); + + const result = await getFromDisk(freshDir, 'new-key'); + assert.equal(result, 'new-value'); + + await rm(freshDir, { recursive: true, force: true }); + }); + }); + + describe('timestamp correctness', () => { + it('stored timestamp is within expected range', async () => { + const before = Date.now(); + await set(tempDir, 'ts-key', 'value'); + const after = Date.now(); + + const raw = await readFile( + join(tempDir, '.dwf', '.cache', 'registry.json'), + 'utf-8', + ); + const store = JSON.parse(raw) as Record; + const entry = store['ts-key']; + assert.ok(entry); + assert.ok(entry.timestamp >= before, `timestamp ${String(entry.timestamp)} should be >= ${String(before)}`); + assert.ok(entry.timestamp <= after, `timestamp ${String(entry.timestamp)} should be <= ${String(after)}`); + }); + }); + + describe('empty store', () => { + it('get() with empty store returns null', () => { + const result = 
get(tempDir, 'missing', {}); + assert.equal(result, null); + }); + }); +}); diff --git a/packages/cli/tests/utils/cache.test.ts b/packages/cli/tests/utils/cache.test.ts new file mode 100644 index 0000000..d326927 --- /dev/null +++ b/packages/cli/tests/utils/cache.test.ts @@ -0,0 +1,62 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { set, getFromDisk } from '../../src/utils/cache.js'; + +describe('cache', () => { + let tempDir: string; + + beforeEach(async () => { + tempDir = await mkdtemp(join(tmpdir(), 'dwf-cache-test-')); + // Create .dwf directory structure + const { mkdir } = await import('node:fs/promises'); + await mkdir(join(tempDir, '.dwf', '.cache'), { recursive: true }); + }); + + afterEach(async () => { + await rm(tempDir, { recursive: true, force: true }); + }); + + it('set + get returns stored value', async () => { + await set(tempDir, 'test-key', { foo: 'bar' }); + const result = await getFromDisk<{ foo: string }>(tempDir, 'test-key'); + assert.deepEqual(result, { foo: 'bar' }); + }); + + it('get returns null on cache miss', async () => { + const result = await getFromDisk(tempDir, 'nonexistent'); + assert.equal(result, null); + }); + + it('get returns null when cache file does not exist', async () => { + const emptyDir = await mkdtemp(join(tmpdir(), 'dwf-cache-empty-')); + const result = await getFromDisk(emptyDir, 'key'); + assert.equal(result, null); + await rm(emptyDir, { recursive: true, force: true }); + }); + + it('handles corrupted cache file gracefully', async () => { + const { writeFile } = await import('node:fs/promises'); + await writeFile(join(tempDir, '.dwf', '.cache', 'registry.json'), 'not json!!!', 'utf-8'); + const result = await getFromDisk(tempDir, 'key'); + assert.equal(result, null); + }); + + it('overwrites existing key', async () => { + 
await set(tempDir, 'key', 'first'); + await set(tempDir, 'key', 'second'); + const result = await getFromDisk(tempDir, 'key'); + assert.equal(result, 'second'); + }); + + it('stores multiple keys independently', async () => { + await set(tempDir, 'a', 1); + await set(tempDir, 'b', 2); + const a = await getFromDisk(tempDir, 'a'); + const b = await getFromDisk(tempDir, 'b'); + assert.equal(a, 1); + assert.equal(b, 2); + }); +}); diff --git a/packages/cli/tests/utils/github.edge.test.ts b/packages/cli/tests/utils/github.edge.test.ts new file mode 100644 index 0000000..e61babf --- /dev/null +++ b/packages/cli/tests/utils/github.edge.test.ts @@ -0,0 +1,105 @@ +import { describe, it, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { fetchRawContent, listDirectory, GitHubError } from '../../src/utils/github.js'; + +describe('github edge cases', () => { + const originalFetch = globalThis.fetch; + + afterEach(() => { + globalThis.fetch = originalFetch; + }); + + describe('GitHubError constructor', () => { + it('sets name, statusCode, and message correctly', () => { + const err = new GitHubError('something went wrong', 502); + assert.equal(err.name, 'GitHubError'); + assert.equal(err.statusCode, 502); + assert.equal(err.message, 'something went wrong'); + assert.ok(err instanceof Error); + }); + }); + + describe('fetchRawContent edge cases', () => { + it('throws GitHubError with status 500 on server error', async () => { + globalThis.fetch = async () => + new Response('Internal Server Error', { status: 500 }); + + await assert.rejects( + () => fetchRawContent('test/rule'), + (err: unknown) => { + assert.ok(err instanceof GitHubError); + assert.equal(err.statusCode, 500); + assert.ok(err.message.includes('HTTP 500')); + return true; + }, + ); + }); + + it('throws GitHubError with status 401 on unauthorized', async () => { + globalThis.fetch = async () => + new Response('Unauthorized', { status: 401 }); + + await assert.rejects( + () => 
fetchRawContent('test/rule'), + (err: unknown) => { + assert.ok(err instanceof GitHubError); + assert.equal(err.statusCode, 401); + return true; + }, + ); + }); + + it('wraps non-Error throw (string) from fetch into GitHubError', async () => { + globalThis.fetch = async () => { + throw 'connection refused'; + }; + + await assert.rejects( + () => fetchRawContent('test/rule'), + (err: unknown) => { + assert.ok(err instanceof GitHubError); + assert.ok(err.message.includes('connection refused')); + assert.equal(err.statusCode, 0); + return true; + }, + ); + }); + + it('returns empty string when response body is empty', async () => { + globalThis.fetch = async () => + new Response('', { status: 200 }); + + const content = await fetchRawContent('test/rule'); + assert.equal(content, ''); + }); + }); + + describe('listDirectory edge cases', () => { + it('returns empty array when GitHub responds with []', async () => { + globalThis.fetch = async () => + new Response(JSON.stringify([]), { + status: 200, + headers: { 'content-type': 'application/json' }, + }); + + const entries = await listDirectory('empty-category'); + assert.deepEqual(entries, []); + }); + + it('builds URL without extra segment when path is omitted', async () => { + let capturedUrl = ''; + globalThis.fetch = async (input: string | URL | Request) => { + capturedUrl = typeof input === 'string' ? input : String(input); + return new Response(JSON.stringify([]), { + status: 200, + headers: { 'content-type': 'application/json' }, + }); + }; + + await listDirectory(); + // URL should end with /rules?ref=... 
(no extra path segment) + assert.ok(capturedUrl.includes('/contents/content/rules?ref=')); + assert.ok(!capturedUrl.includes('/rules/?ref=')); + }); + }); +}); diff --git a/packages/cli/tests/utils/github.test.ts b/packages/cli/tests/utils/github.test.ts new file mode 100644 index 0000000..c99ffeb --- /dev/null +++ b/packages/cli/tests/utils/github.test.ts @@ -0,0 +1,131 @@ +import { describe, it, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { fetchRawContent, listDirectory } from '../../src/utils/github.js'; + +describe('github helpers', () => { + const originalFetch = globalThis.fetch; + + afterEach(() => { + globalThis.fetch = originalFetch; + }); + + describe('fetchRawContent', () => { + it('returns content on success', async () => { + globalThis.fetch = async () => + new Response('# Hello\n- rule 1', { status: 200 }); + + const content = await fetchRawContent('typescript/strict'); + assert.equal(content, '# Hello\n- rule 1'); + }); + + it('throws "not found" on 404', async () => { + globalThis.fetch = async () => + new Response('Not Found', { status: 404 }); + + await assert.rejects( + () => fetchRawContent('unknown/missing'), + (err: Error) => { + assert.ok(err.message.includes('not found')); + return true; + }, + ); + }); + + it('throws rate limit error on 403', async () => { + globalThis.fetch = async () => + new Response('Forbidden', { status: 403 }); + + await assert.rejects( + () => fetchRawContent('typescript/strict'), + (err: Error) => { + assert.ok(err.message.includes('rate limit')); + return true; + }, + ); + }); + + it('throws on network error', async () => { + globalThis.fetch = async () => { + throw new Error('ENOTFOUND'); + }; + + await assert.rejects( + () => fetchRawContent('typescript/strict'), + (err: Error) => { + assert.ok(err.message.includes('Network error')); + return true; + }, + ); + }); + }); + + describe('listDirectory', () => { + it('parses directory listing correctly', async () => { + 
globalThis.fetch = async () => + new Response( + JSON.stringify([ + { name: 'strict.md', type: 'file' }, + { name: 'conventions.md', type: 'file' }, + { name: 'subfolder', type: 'dir' }, + ]), + { status: 200, headers: { 'content-type': 'application/json' } }, + ); + + const entries = await listDirectory('typescript'); + assert.equal(entries.length, 3); + + const first = entries[0]; + const second = entries[1]; + const third = entries[2]; + assert.ok(first); + assert.ok(second); + assert.ok(third); + assert.deepEqual(first, { name: 'strict', type: 'file' }); + assert.deepEqual(second, { name: 'conventions', type: 'file' }); + assert.deepEqual(third, { name: 'subfolder', type: 'dir' }); + }); + + it('strips .md extension from file names', async () => { + globalThis.fetch = async () => + new Response( + JSON.stringify([{ name: 'react.md', type: 'file' }]), + { status: 200, headers: { 'content-type': 'application/json' } }, + ); + + const entries = await listDirectory('javascript'); + const first = entries[0]; + assert.ok(first); + assert.equal(first.name, 'react'); + }); + + it('filters out non-file/dir entries', async () => { + globalThis.fetch = async () => + new Response( + JSON.stringify([ + { name: 'file.md', type: 'file' }, + { name: 'symlink', type: 'symlink' }, + ]), + { status: 200, headers: { 'content-type': 'application/json' } }, + ); + + const entries = await listDirectory(); + assert.equal(entries.length, 1); + const first = entries[0]; + assert.ok(first); + assert.equal(first.type, 'file'); + }); + + it('throws on 404', async () => { + globalThis.fetch = async () => + new Response('Not Found', { status: 404 }); + + await assert.rejects( + () => listDirectory('nonexistent'), + (err: Error) => { + assert.ok(err.message.includes('not found')); + return true; + }, + ); + }); + }); +});